repo_name
stringlengths
5
92
path
stringlengths
4
221
copies
stringclasses
19 values
size
stringlengths
4
6
content
stringlengths
766
896k
license
stringclasses
15 values
hash
int64
-9,223,277,421,539,062,000
9,223,102,107B
line_mean
float64
6.51
99.9
line_max
int64
32
997
alpha_frac
float64
0.25
0.96
autogenerated
bool
1 class
ratio
float64
1.5
13.6
config_test
bool
2 classes
has_no_keywords
bool
2 classes
few_assignments
bool
1 class
manahl/pytest-plugins
pytest-shutil/pytest_shutil/run.py
1
8562
""" Testing tools for running cmdline methods """ import sys import os import imp import logging from functools import update_wrapper import inspect import textwrap from contextlib import closing import subprocess from mock import patch import execnet from six.moves import cPickle # @UnresolvedImport from . import cmdline try: # Python 3 from contextlib import ExitStack except ImportError: from contextlib2 import ExitStack try: # Python 2 str_type = basestring except NameError: # Python 3 str_type = str log = logging.getLogger(__name__) # TODO: add option to return results as a pipe to avoid buffering # large amounts of output def run(cmd, stdin=None, capture_stdout=True, capture_stderr=False, check_rc=True, background=False, **kwargs): """ Run a command; raises `subprocess.CalledProcessError` on failure. Parameters ---------- stdin : file object text piped to standard input capture_stdout : `bool` or `stream` If set, stdout will be captured and returned capture_stderr : `bool` If set, stderr will be piped to stdout and returned **kwargs : optional arguments Other arguments are passed to Popen() """ log.debug('exec: %s' % str(cmd)) stdout = subprocess.PIPE if capture_stdout is True else capture_stdout if capture_stdout else None stderr = subprocess.STDOUT if capture_stderr else None stdin_arg = None if stdin is None else subprocess.PIPE p = subprocess.Popen(cmd, stdin=stdin_arg, stdout=stdout, stderr=stderr, **kwargs) if background: return p (out, _) = p.communicate(stdin) if out is not None and not isinstance(out, str_type): try: out = out.decode('utf-8') except: log.warning("Unable to decode command output to UTF-8") if check_rc and p.returncode != 0: err_msg = ((out if out else 'No output') if capture_stdout is True else '<not captured>') cmd = cmd if isinstance(cmd, str) else ' '.join(cmd) log.error("Command failed: \"%s\"\n%s" % (cmd, err_msg.strip())) ex = subprocess.CalledProcessError(p.returncode, cmd) ex.output = err_msg raise ex return out def 
run_as_main(fn, *argv): """ Run a given function as if it was the system entry point, eg for testing scripts. Eg:: from scripts.Foo import main run_as_main(main, 'foo','bar') This is equivalent to ``Foo foo bar``, assuming ``scripts.Foo.main`` is registered as an entry point. """ with patch("sys.argv", new=['progname'] + list(argv)): log.info("run_as_main: %s" % str(argv)) return fn() def run_module_as_main(module, argv=[]): """ Run a given module as if it was the system entry point. """ where = os.path.dirname(module.__file__) filename = os.path.basename(module.__file__) filename = os.path.splitext(filename)[0] + ".py" with patch("sys.argv", new=argv): imp.load_source('__main__', os.path.join(where, filename)) def _evaluate_fn_source(src, *args, **kwargs): locals_ = {} eval(compile(src, '<string>', 'single'), {}, locals_) fn = next(iter(locals_.values())) if isinstance(fn, staticmethod): fn = fn.__get__(None, object) return fn(*args, **kwargs) def _invoke_method(obj, name, *args, **kwargs): return getattr(obj, name)(*args, **kwargs) def _find_class_from_staticmethod(fn): for _, cls in inspect.getmembers(sys.modules[fn.__module__], inspect.isclass): for name, member in inspect.getmembers(cls): if member is fn or (isinstance(member, staticmethod) and member.__get__(None, object) is fn): return cls, name return None, None def _make_pickleable(fn): # return a pickleable function followed by a tuple of initial arguments # could use partial but this is more efficient try: cPickle.dumps(fn, protocol=0) except (TypeError, cPickle.PickleError, AttributeError): pass else: return fn, () if inspect.ismethod(fn): name, self_ = fn.__name__, fn.__self__ if self_ is None: # Python 2 unbound method self_ = fn.im_class return _invoke_method, (self_, name) elif inspect.isfunction(fn) and fn.__module__ in sys.modules: cls, name = _find_class_from_staticmethod(fn) if (cls, name) != (None, None): try: cPickle.dumps((cls, name), protocol=0) except cPickle.PicklingError: pass else: 
return _invoke_method, (cls, name) # Fall back to sending the source code return _evaluate_fn_source, (textwrap.dedent(inspect.getsource(fn)),) def _run_in_subprocess_redirect_stdout(fd): import os # @Reimport import sys # @Reimport sys.stdout.close() os.dup2(fd, 1) os.close(fd) sys.stdout = os.fdopen(1, 'w', 1) def _run_in_subprocess_remote_fn(channel): from six.moves import cPickle # @UnresolvedImport @Reimport # NOQA fn, args, kwargs = cPickle.loads(channel.receive(None)) channel.send(cPickle.dumps(fn(*args, **kwargs), protocol=0)) def run_in_subprocess(fn, python=sys.executable, cd=None, timeout=None): """ Wrap a function to run in a subprocess. The function must be pickleable or otherwise must be totally self-contained; it must not reference a closure or any globals. It can also be the source of a function (def fn(...): ...). Raises execnet.RemoteError on exception. """ pkl_fn, preargs = (_evaluate_fn_source, (fn,)) if isinstance(fn, str) else _make_pickleable(fn) spec = '//'.join(filter(None, ['popen', 'python=' + python, 'chdir=' + cd if cd else None])) def inner(*args, **kwargs): # execnet sends stdout to /dev/null :( fix_stdout = sys.version_info < (3, 0, 0) # Python 3 passes close_fds=True to subprocess.Popen with ExitStack() as stack: with ExitStack() as stack2: if fix_stdout: fd = os.dup(1) stack2.callback(os.close, fd) gw = execnet.makegateway(spec) # @UndefinedVariable stack.callback(gw.exit) if fix_stdout: with closing(gw.remote_exec(_run_in_subprocess_remote_fn)) as chan: chan.send(cPickle.dumps((_run_in_subprocess_redirect_stdout, (fd,), {}), protocol=0)) chan.receive(None) with closing(gw.remote_exec(_run_in_subprocess_remote_fn)) as chan: payload = (pkl_fn, tuple(i for t in (preargs, args) for i in t), kwargs) chan.send(cPickle.dumps(payload, protocol=0)) return cPickle.loads(chan.receive(timeout)) return inner if isinstance(fn, str) else update_wrapper(inner, fn) def run_with_coverage(cmd, pytestconfig, coverage=None, cd=None, **kwargs): """ Run 
a given command with coverage enabled. This won't make any sense if the command isn't a python script. This must be run within a pytest session that has been setup with the '--cov=xxx' options, and therefore requires the pytestconfig argument that can be retrieved from the standard pytest funcarg of the same name. Parameters ---------- cmd: `List` Command to run pytestconfig: `pytest._config.Config` Pytest configuration object coverage: `str` Path to the coverage executable cd: `str` If not None, will change to this directory before running the cmd. This is the directory that the coverage files will be created in. kwargs: keyword arguments Any extra arguments to pass to `pkglib.cmdline.run` Returns ------- `str` standard output Examples -------- >>> def test_example(pytestconfig): ... cmd = ['python','myscript.py'] ... run_with_coverage(cmd, pytestconfig) """ if isinstance(cmd, str): cmd = [cmd] if coverage is None: coverage = [sys.executable, '-mcoverage.__main__'] elif isinstance(coverage, str): coverage = [coverage] args = coverage + ['run', '-p'] if getattr(pytestconfig.option, 'cov_source', None): source_dirs = ",".join(pytestconfig.option.cov_source) args += ['--source=%s' % source_dirs] args += cmd if cd: with cmdline.chdir(cd): return run(args, **kwargs) return run(args, **kwargs)
mit
-3,054,477,449,442,501,600
32.057915
105
0.620649
false
3.884755
true
false
false
fennekki/unikko
unikko/output/html.py
1
3802
from yattag import Doc, indent from sys import stderr def _inner_recurse_tags(obj, tree, doc, tag, text): """Execute inner loop structure of HTML generation. Params: obj (Object): The object currently being looped over tree (Object): A VOTL Object containing the subtree-to-be -generated doc: yattag Doc().doc tag: yattag Doc().tag text: yattag Doc().text """ if obj.object_type == "header": # Only generate a header element for headers with tag("h{:d}".format(obj.level + 1)): text(obj.first_line) elif obj.object_type == "body": paragraphs = [] current = [] for line in obj.lines: if line == "": if len(current) > 0: paragraphs.append("\n".join(current)) current = [] else: current.append(line) # Last paragraph if len(current) > 0: paragraphs.append("\n".join(current)) for paragraph in paragraphs: with tag("p"): text(paragraph) elif obj.object_type == "body-pre": # Just print the content in a pre-tag with tag("pre"): for line in obj.lines: text(line, "\n") elif obj.object_type[-4:] == "-pre": # Class is name without -pre klass = obj.object_type[:-4] # Custom preformatted with tag("pre", klass=klass): for line in obj.lines: text(line, "\n") else: # Body, but custom klass = obj.object_type paragraphs = [] current = [] for line in obj.lines: # Construct paragraphs into paragraphs from lines in obj if line == "": if len(current) > 0: paragraphs.append("\n".join(current)) current = [] else: current.append(line) if len(current) > 0: paragraphs.append("\n".join(current)) for paragraph in paragraphs: # Each generated paragraph becomes a <p> tag with tag("p", klass=klass): text(paragraph) _recurse_tags(obj, doc, tag, text) def _recurse_tags(tree, doc, tag, text): """Recursively generate HTML from children. 
Params: tree (Object): A VOTL Object containing the subtree-to-be -generated doc: yattag Doc().doc tag: yattag Doc().tag text: yattag Doc().text """ if len(tree.children) > 0: for o in tree.children: if len(o.children) > 0: # Only create divs for objects with children with tag("div"): _inner_recurse_tags(o, tree, doc, tag, text) else: _inner_recurse_tags(o, tree, doc, tag, text) def html_from_tree(tree): """Generate indented HTML from VOTL Object tree. Params: tree (Object): A VOTL Object containing the tree, from which HTML will be generated. Returns: String containing a pretty-formatted HTML document. """ doc, tag, text = Doc().tagtext() doc.asis("<!DOCTYPE html>") try: first = tree.children[0] except IndexError: print("Error generating markdown: Tree has no children!", file=stderr) return "" if first.object_type == "header": title = first.first_line else: title = "Untitled" with tag("html"): with tag("head"): with tag("title"): text(title) doc.stag("meta", charset="utf-8") with tag("body"): _recurse_tags(tree, doc, tag, text) return indent(doc.getvalue())
bsd-2-clause
-3,682,860,216,058,355,000
29.416
78
0.522357
false
4.252796
false
false
false
ricardog/raster-project
projections/r2py/rparser.py
1
5180
from pyparsing import * import re ParserElement.enablePackrat() from .tree import Node, Operator import pdb def rparser(): expr = Forward() lparen = Literal("(").suppress() rparen = Literal(")").suppress() double = Word(nums + ".").setParseAction(lambda t:float(t[0])) integer = pyparsing_common.signed_integer number = pyparsing_common.number ident = Word(initChars = alphas + "_", bodyChars = alphanums + "_" + ".") string = dblQuotedString funccall = Group(ident + lparen + Group(Optional(delimitedList(expr))) + rparen + Optional(integer)).setResultsName("funccall") operand = number | string | funccall | ident expop = Literal('^') multop = oneOf('* /') plusop = oneOf('+ -') introp = oneOf('| :') expr << infixNotation(operand, [(expop, 2, opAssoc.RIGHT), (introp, 2, opAssoc.LEFT), (multop, 2, opAssoc.LEFT), (plusop, 2, opAssoc.LEFT),]).setResultsName('expr') return expr PARSER = rparser() def parse(text): def walk(l): ## ['log', [['cropland', '+', 1]]] ## ['poly', [['log', [['cropland', '+', 1]]], 3], 3] ## [[['factor', ['unSub'], 21], ':', ['poly', [['log', [['cropland', '+', 1]]], 3], 3], ':', ['poly', [['log', [['hpd', '+', 1]]], 3], 2]]] if type(l) in (int, float): return l if isinstance(l, str): if l == 'Intercept' or l == '"Intercept"': return 1 elif l[0] == '"' and l[-1] == '"': return l[1:-1] else: return l if len(l) == 1 and type(l[0]) in (int, str, float, ParseResults): return walk(l[0]) if l[0] == 'factor': assert len(l) == 3, "unexpected number of arguments to factor" assert len(l[1]) == 1, "argument to factor is an expression" assert type(l[2]) == int, "second argument to factor is not an int" return Node(Operator('=='), (Node(Operator('in'), (l[1][0], 'float32[:]')), l[2])) if l[0] == 'poly': assert len(l) in (2, 3), "unexpected number of arguments to poly" assert isinstance(l[1][1], int), "degree argument to poly is not an int" inner = walk(l[1][0]) degree = l[1][1] if len(l) == 2: pwr = 1 else: assert type(l[2]) == int, "power argument to poly is not 
an int" pwr = l[2] return Node(Operator('sel'), (Node(Operator('poly'), (inner, degree)), pwr)) if l[0] == 'log': assert len(l) == 2, "unexpected number of arguments to log" args = walk(l[1]) return Node(Operator('log'), [args]) if l[0] == 'scale': assert len(l[1]) in (3, 5), "unexpected number of arguments to scale" args = walk(l[1][0]) return Node(Operator('scale'), [args] + l[1][1:]) if l[0] == 'I': assert len(l) == 2, "unexpected number of arguments to I" args = walk(l[1]) return Node(Operator('I'), [args]) # Only used for testing if l[0] in ('sin', 'tan'): assert len(l) == 2, "unexpected number of arguments to %s" % l[0] args = walk(l[1]) return Node(Operator(l[0]), [args]) if l[0] in ('max', 'min', 'pow'): assert len(l) == 2, "unexpected number of arguments to %s" % l[0] assert len(l[1]) == 2, "unexpected number of arguments to %s" % l[0] left = walk(l[1][0]) right = walk(l[1][1]) return Node(Operator(l[0]), (left, right)) if l[0] == 'exp': assert len(l) == 2, "unexpected number of arguments to exp" args = walk(l[1]) return Node(Operator('exp'), [args]) if l[0] == 'clip': assert len(l) == 2, "unexpected number of arguments to %s" % l[0] assert len(l[1]) == 3, "unexpected number of arguments to %s" % l[0] left = walk(l[1][0]) low = walk(l[1][1]) high = walk(l[1][2]) return Node(Operator(l[0]), (left, low, high)) if l[0] == 'inv_logit': assert len(l) == 2, "unexpected number of arguments to inv_logit" args = walk(l[1]) return Node(Operator('inv_logit'), [args]) ## Only binary operators left if len(l) == 1: pdb.set_trace() pass assert len(l) % 2 == 1, "unexpected number of arguments for binary operator" assert len(l) != 1, "unexpected number of arguments for binary operator" ## FIXME: this only works for associative operators. Need to either ## special-case division or include an attribute that specifies ## whether the op is associative. 
left = walk(l.pop(0)) op = l.pop(0) right = walk(l) if type(right) != Node: return Node(Operator(op), (left, right)) elif right.type.type == op: return Node(Operator(op), (left, ) + right.args) return Node(Operator(op), (left, right)) ### FIXME: hack if not isinstance(text, str): text = str(text) new_text = re.sub('newrange = c\((\d), (\d+)\)', '\\1, \\2', text) new_text = new_text.replace('rescale(', 'scale(') nodes = PARSER.parseString(new_text, parseAll=True) tree = walk(nodes) if isinstance(tree, (str, int, float)): tree = Node(Operator('I'), [tree]) return tree
apache-2.0
8,386,587,708,140,975,000
36.266187
143
0.547104
false
3.301466
false
false
false
scdoshi/djutils
djutils/gis.py
1
2346
""" GIS: GIS related utilities. """ ############################################################################### ## Imports ############################################################################### import math ############################################################################### ## GIS Format Conversions ############################################################################### def GPRMC2DegDec(lat, latDirn, lng, lngDirn): """Converts GPRMC formats (Decimal Minutes) to Degrees Decimal Eg. """ x = float(lat[0:2]) + float(lat[2:]) / 60 y = float(lng[0:3]) + float(lng[3:]) / 60 if latDirn == 'S': x = -x if lngDirn == 'W': y = -y return x, y def TinyGPS2DegDec(lat, lng): """Converts TinyGPS formats (Decimal Degrees to e-5) to Degrees Decimal Eg. """ x = float(lat[:-5] + '.' + lat[-5:]) y = float(lng[:-5] + '.' + lng[-5:]) return x, y ############################################################################### ## Functions to convert miles to change in lat, long (approx) ############################################################################### # Distances are measured in miles. # Longitudes and latitudes are measured in degrees. # Earth is assumed to be perfectly spherical. earth_radius = 3960.0 degrees_to_radians = math.pi / 180.0 radians_to_degrees = 180.0 / math.pi def ChangeInLatitude(miles): """Given a distance north, return the change in latitude.""" return (miles / earth_radius) * radians_to_degrees def ChangeInLongitude(lat, miles): """Given a latitude and a distance west, return the change in longitude.""" # Find the radius of a circle around the earth at given latitude. r = earth_radius * math.cos(lat * degrees_to_radians) return (miles / r) * radians_to_degrees def CalculateBoundingBox(lng, lat, miles): """ Given a latitude, longitude and a distance in miles, calculate the co-ordinates of the bounding box 2*miles on long each side with the given co-ordinates at the center. 
""" latChange = ChangeInLatitude(miles) latSouth = lat - latChange latNorth = lat + latChange lngChange = ChangeInLongitude(lat, miles) lngWest = lng + lngChange lngEast = lng - lngChange return (lngWest, latSouth, lngEast, latNorth)
bsd-3-clause
8,272,118,917,767,031,000
27.962963
79
0.516624
false
3.884106
false
false
false
botswana-harvard/bais-subject
bais_subject/models/attitudes_towards_people.py
1
6192
from django.db import models from edc_base.model_fields import OtherCharField from edc_base.model_mixins import BaseUuidModel from ..choices import (YES_NO, TESTING_REASONS, TB_NONDISCLOSURE, HIV_TEST_RESULT, ARV_USAGE, ARV_TREATMENT_SOURCE, REASONS_ARV_NOT_TAKEN, TB_REACTION) class AttitudesTowardsPeople(BaseUuidModel): meal_sharing = models.CharField( verbose_name='Would you ever share a meal (from the same plate)' ' with a person you knew or suspected had HIV AND AIDS?', max_length=35, choices=YES_NO, ) aids_household_care = models.CharField( verbose_name='If a member of your family became sick with HIV AND AIDS,' ' would you be willing to care for him or her in your household?', max_length=35, choices=YES_NO, ) tb_household_care = models.CharField( verbose_name='If a member of your family became sick with TB,' ' would you be willing to care for him or her in your household?', max_length=35, choices=YES_NO, ) tb_household_empathy = models.CharField( verbose_name='If a member of your family got diagnosed with TB,' ' would you be willing to care for him or her in your household?', max_length=35, choices=YES_NO, ) aids_housekeeper = models.CharField( verbose_name='If your housekeeper, nanny or anybody looking' ' after your child has HIV but is not sick, ' 'would you allow him/her to continue' ' working/assisting with babysitting in your house? 
', max_length=35, choices=YES_NO, ) aids_teacher = models.CharField( verbose_name='If a teacher has HIV but is not sick,' ' should s/he be allowed to continue teaching in school?', max_length=35, choices=YES_NO, ) aids_shopkeeper = models.CharField( verbose_name='If you knew that a shopkeeper or food seller had' ' HIV or AIDS, would you buy vegetables from them?', max_length=35, choices=YES_NO, ) aids_family_member = models.CharField( verbose_name='If a member of your family got infected' 'with HIV, would you want it to remain a secret?', max_length=35, choices=YES_NO, help_text="", ) aids_children = models.CharField( verbose_name='Do you think that children living with HIV ' 'should attend school with children who are HIV negative?', max_length=35, choices=YES_NO, ) aids_room_sharing = models.CharField( verbose_name='Would you share a room ' 'with a person you knew has been diagnosed with TB?', max_length=35, choices=YES_NO, ) aids_hiv_testing = models.CharField( verbose_name='Have you ever been tested for HIV?', max_length=35, choices=YES_NO, ) aids_hiv_times_tested = models.CharField( verbose_name='In the past 12 months how many times' ' have you been tested for HIV and received your results?', max_length=35, choices=YES_NO, ) aids_hiv_test_partner = models.CharField( verbose_name='Did you test together with your partner?', max_length=250, choices=YES_NO, ) aids_hiv_test_reason = models.CharField( verbose_name='What was the main reason for testing?', max_length=35, choices=TESTING_REASONS, ) aids_hiv_test_reason_other = OtherCharField( verbose_name='Specify Other', max_length=35, null=True, blank=True, ) aids_hiv_not_tested = models.CharField( verbose_name='Why haven’t you tested?', max_length=35, choices=TESTING_REASONS, ) aids_hiv_not_tested_other = OtherCharField( verbose_name='Other, Specify', max_length=35, null=True, blank=True, ) aids_hiv_test_result = models.CharField( verbose_name='What was the result of your last HIV test? 
', max_length=35, choices=HIV_TEST_RESULT, ) aids_hiv_test_result_disclosure = models.CharField( verbose_name='Did you tell anyone the result of your the test? ', max_length=35, choices=YES_NO, ) current_arv_therapy = models.CharField( verbose_name='Are you currently taking ARVs?', max_length=35, choices=ARV_USAGE, ) current_arv_supplier = models.CharField( verbose_name='Where are you taking your ARVs?', max_length=35, choices=ARV_TREATMENT_SOURCE, ) current_arv_supplier_other = OtherCharField( verbose_name='Other Specify', max_length=35, null=True, blank=True, ) not_on_arv_therapy = models.CharField( verbose_name='Why aren\'t you taking your ARVs?', max_length=35, choices=REASONS_ARV_NOT_TAKEN, ) not_on_arv_therapy_other = OtherCharField( verbose_name='Other Specify', max_length=35, blank=True, null=True ) tb_reaction = models.CharField( verbose_name='What would be your reaction' ' if you found out you had TB ?', max_length=35, choices=TB_REACTION, ) tb_reaction_other = OtherCharField( verbose_name='Other Specify', max_length=35, null=True, blank=True, ) tb_diagnosis = models.CharField( verbose_name='If you were diagnosed with Tb,would you tell anyone?', max_length=35, choices=YES_NO, ) tb_diagnosis_disclosure = models.CharField( verbose_name='If yes, whom would you tell?', max_length=35, choices=YES_NO, ) tb_diagnosis_no_disclosure = models.CharField( verbose_name='If No,why not', max_length=35, choices=TB_NONDISCLOSURE, ) tb_diagnosis_no_disclosure_other = OtherCharField( verbose_name='Other, Specify', max_length=35, blank=True, null=True ) class Meta(BaseUuidModel.Meta): app_label = 'bais_subject'
gpl-3.0
-4,917,492,235,012,561,000
27.525346
80
0.607593
false
3.580104
true
false
false
fako/datascope
src/sources/models/google/text.py
1
2087
from datagrowth.exceptions import DGInvalidResource from sources.models.google.query import GoogleQuery class GoogleText(GoogleQuery): URI_TEMPLATE = 'https://www.googleapis.com/customsearch/v1?q={}' GET_SCHEMA = { "args": { "type": "array", "items": [ { "type": "string", # the query string }, { "type": "integer", # amount of desired images }, ], "additionalItems": False, "minItems": 1 }, "kwargs": None } def variables(self, *args): args = args or self.request.get("args") return { "url": (args[0],), "quantity": args[2] if len(args) > 2 else 0, } def auth_parameters(self): params = super(GoogleText, self).auth_parameters() params.update({ "cx": self.config.cx }) return params @property def content(self): content_type, data = super(GoogleText, self).content try: if data is not None: data["queries"]["request"][0]["searchTerms"] = data["queries"]["request"][0]["searchTerms"][1:-1] except (KeyError, IndexError): raise DGInvalidResource("Google Image resource does not specify searchTerms", self) return content_type, data def next_parameters(self): if self.request["quantity"] <= 0: return {} content_type, data = super(GoogleText, self).content missing_quantity = self.request["quantity"] - 10 try: nextData = data["queries"]["nextPage"][0] except KeyError: return {} return { "start": nextData["startIndex"], "quantity": missing_quantity } def _create_request(self, method, *args, **kwargs): request = super(GoogleText, self)._create_request(method, *args, **kwargs) request["quantity"] = self.variables(*args)["quantity"] return request
gpl-3.0
6,124,814,023,701,157,000
30.621212
113
0.529947
false
4.347917
false
false
false
SymbiFlow/prjxray
fuzzers/005-tilegrid/pcie/top.py
1
1574
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright (C) 2017-2020 The Project X-Ray Authors. # # Use of this source code is governed by a ISC-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC import os import random random.seed(int(os.getenv("SEED"), 16)) from prjxray import util from prjxray.db import Database def gen_sites(): db = Database(util.get_db_root(), util.get_part()) grid = db.grid() for tile_name in sorted(grid.tiles()): loc = grid.loc_of_tilename(tile_name) gridinfo = grid.gridinfo_at_loc(loc) for site_name, site_type in gridinfo.sites.items(): if site_type in ['PCIE_2_1']: yield tile_name, site_name def write_params(params): pinstr = 'tile,val,site\n' for tile, (site, val) in sorted(params.items()): pinstr += '%s,%s,%s\n' % (tile, val, site) open('params.csv', 'w').write(pinstr) def run(): print(''' module top(input wire in, output wire out); ''') params = {} sites = list(gen_sites()) for (tile_name, site_name), isone in zip(sites, util.gen_fuzz_states(len(sites))): params[tile_name] = (site_name, isone) attr = "FALSE" if isone else "TRUE" print( ''' (* KEEP, DONT_TOUCH*) PCIE_2_1 #( .AER_CAP_PERMIT_ROOTERR_UPDATE("{}") ) pcie ();'''.format(attr)) print("endmodule") write_params(params) if __name__ == '__main__': run()
isc
6,070,674,432,901,419,000
24.387097
79
0.579416
false
3.167002
false
false
false
itsnotmyfault1/kimcopter2
crazyflie-pc-client/lib/cflib/crazyflie/__init__.py
1
13576
#!/usr/bin/env python # -*- coding: utf-8 -*- # # || ____ _ __ # +------+ / __ )(_) /_______________ _____ ___ # | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \ # +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/ # || || /_____/_/\__/\___/_/ \__,_/ /___/\___/ # # Copyright (C) 2011-2013 Bitcraze AB # # Crazyflie Nano Quadcopter Client # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. """ The Crazyflie module is used to easily connect/send/receive data from a Crazyflie. Each function in the Crazyflie has a class in the module that can be used to access that functionality. The same design is then used in the Crazyflie firmware which makes the mapping 1:1 in most cases. 
""" __author__ = 'Bitcraze AB' __all__ = ['Crazyflie'] import logging logger = logging.getLogger(__name__) import time from threading import Thread from threading import Timer from .commander import Commander from .console import Console from .param import Param from .log import Log from .toccache import TocCache import cflib.crtp from cflib.utils.callbacks import Caller class State: """Stat of the connection procedure""" DISCONNECTED = 0 INITIALIZED = 1 CONNECTED = 2 SETUP_FINISHED = 3 class Crazyflie(): """The Crazyflie class""" # Callback callers disconnected = Caller() connectionLost = Caller() connected = Caller() connectionInitiated = Caller() connectSetupFinished = Caller() connectionFailed = Caller() receivedPacket = Caller() linkQuality = Caller() state = State.DISCONNECTED def __init__(self, link=None, ro_cache=None, rw_cache=None): """ Create the objects from this module and register callbacks. ro_cache -- Path to read-only cache (string) rw_cache -- Path to read-write cache (string) """ self.link = link self._toc_cache = TocCache(ro_cache=ro_cache, rw_cache=rw_cache) self.incoming = _IncomingPacketHandler(self) self.incoming.setDaemon(True) self.incoming.start() self.commander = Commander(self) self.log = Log(self) self.console = Console(self) self.param = Param(self) self._log_toc_updated = False self._param_toc_updated = False self.link_uri = "" # Used for retry when no reply was sent back self.receivedPacket.add_callback(self._check_for_initial_packet_cb) self.receivedPacket.add_callback(self._check_for_answers) self.answer_timers = {} # Connect callbacks to logger self.disconnected.add_callback( lambda uri: logger.info("Callback->Disconnected from [%s]", uri)) self.connected.add_callback( lambda uri: logger.info("Callback->Connected to [%s]", uri)) self.connectionLost.add_callback( lambda uri, errmsg: logger.info("Callback->Connectionl ost to" " [%s]: %s", uri, errmsg)) self.connectionFailed.add_callback( lambda uri, errmsg: 
logger.info("Callback->Connected failed to" " [%s]: %s", uri, errmsg)) self.connectionInitiated.add_callback( lambda uri: logger.info("Callback->Connection initialized[%s]", uri)) self.connectSetupFinished.add_callback( lambda uri: logger.info("Callback->Connection setup finished [%s]", uri)) def _start_connection_setup(self): """Start the connection setup by refreshing the TOCs""" logger.info("We are connected[%s], request connection setup", self.link_uri) self.log.refresh_toc(self._log_toc_updated_cb, self._toc_cache) def _param_toc_updated_cb(self): """Called when the param TOC has been fully updated""" logger.info("Param TOC finished updating") self._param_toc_updated = True if (self._log_toc_updated is True and self._param_toc_updated is True): self.connectSetupFinished.call(self.link_uri) def _log_toc_updated_cb(self): """Called when the log TOC has been fully updated""" logger.info("Log TOC finished updating") self._log_toc_updated = True self.param.refresh_toc(self._param_toc_updated_cb, self._toc_cache) if (self._log_toc_updated and self._param_toc_updated): logger.info("All TOCs finished updating") self.connectSetupFinished.call(self.link_uri) def _link_error_cb(self, errmsg): """Called from the link driver when there's an error""" logger.warning("Got link error callback [%s] in state [%s]", errmsg, self.state) if (self.link is not None): self.link.close() self.link = None if (self.state == State.INITIALIZED): self.connectionFailed.call(self.link_uri, errmsg) if (self.state == State.CONNECTED or self.state == State.SETUP_FINISHED): self.disconnected.call(self.link_uri) self.connectionLost.call(self.link_uri, errmsg) self.state = State.DISCONNECTED def _link_quality_cb(self, percentage): """Called from link driver to report link quality""" self.linkQuality.call(percentage) def _check_for_initial_packet_cb(self, data): """ Called when first packet arrives from Crazyflie. This is used to determine if we are connected to something that is answering. 
""" self.state = State.CONNECTED self.connected.call(self.link_uri) self.receivedPacket.remove_callback(self._check_for_initial_packet_cb) def open_link(self, link_uri): """ Open the communication link to a copter at the given URI and setup the connection (download log/parameter TOC). """ self.connectionInitiated.call(link_uri) self.state = State.INITIALIZED self.link_uri = link_uri self._log_toc_updated = False self._param_toc_updated = False try: self.link = cflib.crtp.get_link_driver(link_uri, self._link_quality_cb, self._link_error_cb) # Add a callback so we can check that any data is comming # back from the copter self.receivedPacket.add_callback(self._check_for_initial_packet_cb) self._start_connection_setup() except Exception as ex: # pylint: disable=W0703 # We want to catch every possible exception here and show # it in the user interface import traceback logger.error("Couldn't load link driver: %s\n\n%s", ex, traceback.format_exc()) exception_text = "Couldn't load link driver: %s\n\n%s" % ( ex, traceback.format_exc()) if self.link: self.link.close() self.link = None self.connectionFailed.call(link_uri, exception_text) def close_link(self): """Close the communication link.""" logger.info("Closing link") if (self.link is not None): self.commander.send_setpoint(0, 0, 0, 0, False) if (self.link is not None): self.link.close() self.link = None self.disconnected.call(self.link_uri) def add_port_callback(self, port, cb): """Add a callback to cb on port""" self.incoming.add_port_callback(port, cb) def remove_port_callback(self, port, cb): """Remove the callback cb on port""" self.incoming.remove_port_callback(port, cb) def _no_answer_do_retry(self, pk): """Resend packets that we have not gotten answers to""" logger.debug("ExpectAnswer: No answer on [%d], do retry", pk.port) # Cancel timer before calling for retry to help bug hunting old_timer = self.answer_timers[pk.port] if (old_timer is not None): old_timer.cancel() self.send_packet(pk, True) else: 
logger.warning("ExpectAnswer: ERROR! Was doing retry but" "timer was None") def _check_for_answers(self, pk): """ Callback called for every packet received to check if we are waiting for an answer on this port. If so, then cancel the retry timer. """ try: timer = self.answer_timers[pk.port] if (timer is not None): logger.debug("ExpectAnswer: Got answer back on port [%d]" ", cancelling timer", pk.port) timer.cancel() self.answer_timers[pk.port] = None except KeyError: # We are not waiting for any answer on this port, ignore.. pass def send_packet(self, pk, expect_answer=False): """ Send a packet through the link interface. pk -- Packet to send expect_answer -- True if a packet from the Crazyflie is expected to be sent back, otherwise false """ if (self.link is not None): self.link.send_packet(pk) if (expect_answer): logger.debug("ExpectAnswer: Will expect answer on port [%d]", pk.port) new_timer = Timer(0.2, lambda: self._no_answer_do_retry(pk)) try: old_timer = self.answer_timers[pk.port] if (old_timer is not None): old_timer.cancel() # If we get here a second call has been made to send # packet on this port before we have gotten the first # one back. This is an error and might cause loss of # packets!! logger.warning("ExpectAnswer: ERROR! 
Older timer whas" " running while scheduling new one on " "[%d]", pk.port) except KeyError: pass self.answer_timers[pk.port] = new_timer new_timer.start() class _IncomingPacketHandler(Thread): """Handles incoming packets and sends the data to the correct receivers""" def __init__(self, cf): Thread.__init__(self) self.cf = cf self.cb = [] def add_port_callback(self, port, cb): """Add a callback for data that comes on a specific port""" logger.debug("Adding callback on port [%d] to [%s]", port, cb) self.add_header_callback(cb, port, 0, 0xff, 0x0) def remove_port_callback(self, port, cb): """Remove a callback for data that comes on a specific port""" logger.debug("Removing callback on port [%d] to [%s]", port, cb) for port_callback in self.cb: if (port_callback[0] == port and port_callback[4] == cb): self.cb.remove(port_callback) def add_header_callback(self, cb, port, channel, port_mask=0xFF, channel_mask=0xFF): """ Add a callback for a specific port/header callback with the possibility to add a mask for channel and port for multiple hits for same callback. """ self.cb.append([port, port_mask, channel, channel_mask, cb]) def run(self): while(True): if self.cf.link is None: time.sleep(1) continue pk = self.cf.link.receive_packet(1) if pk is None: continue #All-packet callbacks self.cf.receivedPacket.call(pk) found = False for cb in self.cb: if (cb[0] == pk.port & cb[1] and cb[2] == pk.channel & cb[3]): try: cb[4](pk) except Exception: # pylint: disable=W0703 # Disregard pylint warning since we want to catch all # exceptions and we can't know what will happen in # the callbacks. import traceback logger.warning("Exception while doing callback on port" " [%d]\n\n%s", pk.port, traceback.format_exc()) if (cb[0] != 0xFF): found = True if not found: logger.warning("Got packet on header (%d,%d) but no callback " "to handle it", pk.port, pk.channel)
gpl-2.0
7,902,765,162,880,301,000
37.350282
79
0.560106
false
4.149144
false
false
false
twisted/mantissa
xmantissa/test/historic/test_privateApplication3to4.py
1
3405
""" Tests for the upgrade of L{PrivateApplication} schema from 3 to 4. """ from axiom.userbase import LoginSystem from axiom.test.historic.stubloader import StubbedTest from xmantissa.ixmantissa import ITemplateNameResolver, IWebViewer from xmantissa.website import WebSite from xmantissa.webapp import PrivateApplication from xmantissa.publicweb import CustomizedPublicPage from xmantissa.webgestalt import AuthenticationApplication from xmantissa.prefs import PreferenceAggregator, DefaultPreferenceCollection from xmantissa.search import SearchAggregator from xmantissa.test.historic.stub_privateApplication3to4 import ( USERNAME, DOMAIN, PREFERRED_THEME, PRIVATE_KEY) class PrivateApplicationUpgradeTests(StubbedTest): """ Tests for L{xmantissa.webapp.privateApplication3to4}. """ def setUp(self): d = StubbedTest.setUp(self) def siteStoreUpgraded(ignored): loginSystem = self.store.findUnique(LoginSystem) account = loginSystem.accountByAddress(USERNAME, DOMAIN) self.subStore = account.avatars.open() return self.subStore.whenFullyUpgraded() d.addCallback(siteStoreUpgraded) return d def test_powerup(self): """ At version 4, L{PrivateApplication} should be an L{ITemplateNameResolver} powerup on its store. """ application = self.subStore.findUnique(PrivateApplication) powerups = list(self.subStore.powerupsFor(ITemplateNameResolver)) self.assertIn(application, powerups) def test_webViewer(self): """ At version 5, L{PrivateApplication} should be an L{IWebViewer} powerup on its store. """ application = self.subStore.findUnique(PrivateApplication) interfaces = list(self.subStore.interfacesFor(application)) self.assertIn(IWebViewer, interfaces) def test_attributes(self): """ All of the attributes of L{PrivateApplication} should have the same values on the upgraded item as they did before the upgrade. 
""" application = self.subStore.findUnique(PrivateApplication) self.assertEqual(application.preferredTheme, PREFERRED_THEME) self.assertEqual(application.privateKey, PRIVATE_KEY) website = self.subStore.findUnique(WebSite) self.assertIdentical(application.website, website) customizedPublicPage = self.subStore.findUnique(CustomizedPublicPage) self.assertIdentical( application.customizedPublicPage, customizedPublicPage) authenticationApplication = self.subStore.findUnique( AuthenticationApplication) self.assertIdentical( application.authenticationApplication, authenticationApplication) preferenceAggregator = self.subStore.findUnique(PreferenceAggregator) self.assertIdentical( application.preferenceAggregator, preferenceAggregator) defaultPreferenceCollection = self.subStore.findUnique( DefaultPreferenceCollection) self.assertIdentical( application.defaultPreferenceCollection, defaultPreferenceCollection) searchAggregator = self.subStore.findUnique(SearchAggregator) self.assertIdentical(application.searchAggregator, searchAggregator) self.assertIdentical(application.privateIndexPage, None)
mit
4,140,660,516,419,531,000
37.258427
77
0.726579
false
4.422078
true
false
false
tmenjo/cinder-2015.1.1
cinder/tests/test_rbd.py
1
50268
# Copyright 2012 Josh Durgin # Copyright 2013 Canonical Ltd. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import math import os import tempfile import mock from oslo_log import log as logging from oslo_utils import timeutils from oslo_utils import units from cinder import db from cinder import exception from cinder.i18n import _ from cinder.image import image_utils from cinder import test from cinder.tests.image import fake as fake_image from cinder.tests import test_volume from cinder.volume import configuration as conf import cinder.volume.drivers.rbd as driver from cinder.volume.flows.manager import create_volume LOG = logging.getLogger(__name__) # This is used to collect raised exceptions so that tests may check what was # raised. # NOTE: this must be initialised in test setUp(). RAISED_EXCEPTIONS = [] class MockException(Exception): def __init__(self, *args, **kwargs): RAISED_EXCEPTIONS.append(self.__class__) class MockImageNotFoundException(MockException): """Used as mock for rbd.ImageNotFound.""" class MockImageBusyException(MockException): """Used as mock for rbd.ImageBusy.""" class MockImageExistsException(MockException): """Used as mock for rbd.ImageExists.""" def common_mocks(f): """Decorator to set mocks common to all tests. The point of doing these mocks here is so that we don't accidentally set mocks that can't/don't get unset. 
""" def _common_inner_inner1(inst, *args, **kwargs): @mock.patch('cinder.volume.drivers.rbd.RBDVolumeProxy') @mock.patch('cinder.volume.drivers.rbd.RADOSClient') @mock.patch('cinder.backup.drivers.ceph.rbd') @mock.patch('cinder.backup.drivers.ceph.rados') def _common_inner_inner2(mock_rados, mock_rbd, mock_client, mock_proxy): inst.mock_rbd = mock_rbd inst.mock_rados = mock_rados inst.mock_client = mock_client inst.mock_proxy = mock_proxy inst.mock_rbd.RBD.Error = Exception inst.mock_rados.Error = Exception inst.mock_rbd.ImageBusy = MockImageBusyException inst.mock_rbd.ImageNotFound = MockImageNotFoundException inst.mock_rbd.ImageExists = MockImageExistsException inst.driver.rbd = inst.mock_rbd inst.driver.rados = inst.mock_rados return f(inst, *args, **kwargs) return _common_inner_inner2() return _common_inner_inner1 CEPH_MON_DUMP = """dumped monmap epoch 1 { "epoch": 1, "fsid": "33630410-6d93-4d66-8e42-3b953cf194aa", "modified": "2013-05-22 17:44:56.343618", "created": "2013-05-22 17:44:56.343618", "mons": [ { "rank": 0, "name": "a", "addr": "[::1]:6789\/0"}, { "rank": 1, "name": "b", "addr": "[::1]:6790\/0"}, { "rank": 2, "name": "c", "addr": "[::1]:6791\/0"}, { "rank": 3, "name": "d", "addr": "127.0.0.1:6792\/0"}, { "rank": 4, "name": "e", "addr": "example.com:6791\/0"}], "quorum": [ 0, 1, 2]} """ class RBDTestCase(test.TestCase): def setUp(self): global RAISED_EXCEPTIONS RAISED_EXCEPTIONS = [] super(RBDTestCase, self).setUp() self.cfg = mock.Mock(spec=conf.Configuration) self.cfg.volume_tmp_dir = None self.cfg.image_conversion_dir = None self.cfg.rbd_pool = 'rbd' self.cfg.rbd_ceph_conf = None self.cfg.rbd_secret_uuid = None self.cfg.rbd_user = None self.cfg.volume_dd_blocksize = '1M' self.cfg.rbd_store_chunk_size = 4 mock_exec = mock.Mock() mock_exec.return_value = ('', '') self.driver = driver.RBDDriver(execute=mock_exec, configuration=self.cfg) self.driver.set_initialized() self.volume_name = u'volume-00000001' self.snapshot_name = u'snapshot-00000001' 
self.volume_size = 1 self.volume = dict(name=self.volume_name, size=self.volume_size) self.snapshot = dict(volume_name=self.volume_name, name=self.snapshot_name) @common_mocks def test_create_volume(self): client = self.mock_client.return_value client.__enter__.return_value = client self.driver.create_volume(self.volume) chunk_size = self.cfg.rbd_store_chunk_size * units.Mi order = int(math.log(chunk_size, 2)) args = [client.ioctx, str(self.volume_name), self.volume_size * units.Gi, order] kwargs = {'old_format': False, 'features': client.features} self.mock_rbd.RBD.return_value.create.assert_called_once_with( *args, **kwargs) client.__enter__.assert_called_once_with() client.__exit__.assert_called_once_with(None, None, None) @common_mocks def test_manage_existing_get_size(self): with mock.patch.object(self.driver.rbd.Image(), 'size') as \ mock_rbd_image_size: with mock.patch.object(self.driver.rbd.Image(), 'close') \ as mock_rbd_image_close: mock_rbd_image_size.return_value = 2 * units.Gi existing_ref = {'source-name': self.volume_name} return_size = self.driver.manage_existing_get_size( self.volume, existing_ref) self.assertEqual(2, return_size) mock_rbd_image_size.assert_called_once_with() mock_rbd_image_close.assert_called_once_with() @common_mocks def test_manage_existing_get_invalid_size(self): with mock.patch.object(self.driver.rbd.Image(), 'size') as \ mock_rbd_image_size: with mock.patch.object(self.driver.rbd.Image(), 'close') \ as mock_rbd_image_close: mock_rbd_image_size.return_value = 'abcd' existing_ref = {'source-name': self.volume_name} self.assertRaises(exception.VolumeBackendAPIException, self.driver.manage_existing_get_size, self.volume, existing_ref) mock_rbd_image_size.assert_called_once_with() mock_rbd_image_close.assert_called_once_with() @common_mocks def test_manage_existing(self): client = self.mock_client.return_value client.__enter__.return_value = client with mock.patch.object(self.driver.rbd.RBD(), 'rename') as \ mock_rbd_image_rename: 
exist_volume = 'vol-exist' existing_ref = {'source-name': exist_volume} mock_rbd_image_rename.return_value = 0 self.driver.manage_existing(self.volume, existing_ref) mock_rbd_image_rename.assert_called_with( client.ioctx, exist_volume, self.volume_name) @common_mocks def test_manage_existing_with_exist_rbd_image(self): client = self.mock_client.return_value client.__enter__.return_value = client self.mock_rbd.RBD.return_value.rename.side_effect = ( MockImageExistsException) exist_volume = 'vol-exist' existing_ref = {'source-name': exist_volume} self.assertRaises(self.mock_rbd.ImageExists, self.driver.manage_existing, self.volume, existing_ref) # Make sure the exception was raised self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageExists]) @common_mocks def test_delete_backup_snaps(self): self.driver.rbd.Image.remove_snap = mock.Mock() with mock.patch.object(self.driver, '_get_backup_snaps') as \ mock_get_backup_snaps: mock_get_backup_snaps.return_value = [{'name': 'snap1'}] rbd_image = self.driver.rbd.Image() self.driver._delete_backup_snaps(rbd_image) mock_get_backup_snaps.assert_called_once_with(rbd_image) self.assertTrue( self.driver.rbd.Image.return_value.remove_snap.called) @common_mocks def test_delete_volume(self): client = self.mock_client.return_value self.driver.rbd.Image.return_value.list_snaps.return_value = [] with mock.patch.object(self.driver, '_get_clone_info') as \ mock_get_clone_info: with mock.patch.object(self.driver, '_delete_backup_snaps') as \ mock_delete_backup_snaps: mock_get_clone_info.return_value = (None, None, None) self.driver.delete_volume(self.volume) mock_get_clone_info.assert_called_once_with( self.mock_rbd.Image.return_value, self.volume_name, None) (self.driver.rbd.Image.return_value .list_snaps.assert_called_once_with()) client.__enter__.assert_called_once_with() client.__exit__.assert_called_once_with(None, None, None) mock_delete_backup_snaps.assert_called_once_with( self.mock_rbd.Image.return_value) self.assertFalse( 
self.driver.rbd.Image.return_value.unprotect_snap.called) self.assertEqual( 1, self.driver.rbd.RBD.return_value.remove.call_count) @common_mocks def delete_volume_not_found(self): self.mock_rbd.Image.side_effect = self.mock_rbd.ImageNotFound self.assertIsNone(self.driver.delete_volume(self.volume)) self.mock_rbd.Image.assert_called_once_with() # Make sure the exception was raised self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageNotFound]) @common_mocks def test_delete_busy_volume(self): self.mock_rbd.Image.return_value.list_snaps.return_value = [] self.mock_rbd.RBD.return_value.remove.side_effect = ( self.mock_rbd.ImageBusy) with mock.patch.object(self.driver, '_get_clone_info') as \ mock_get_clone_info: mock_get_clone_info.return_value = (None, None, None) with mock.patch.object(self.driver, '_delete_backup_snaps') as \ mock_delete_backup_snaps: with mock.patch.object(driver, 'RADOSClient') as \ mock_rados_client: self.assertRaises(exception.VolumeIsBusy, self.driver.delete_volume, self.volume) mock_get_clone_info.assert_called_once_with( self.mock_rbd.Image.return_value, self.volume_name, None) (self.mock_rbd.Image.return_value.list_snaps .assert_called_once_with()) mock_rados_client.assert_called_once_with(self.driver) mock_delete_backup_snaps.assert_called_once_with( self.mock_rbd.Image.return_value) self.assertFalse( self.mock_rbd.Image.return_value.unprotect_snap.called) self.assertEqual( 1, self.mock_rbd.RBD.return_value.remove.call_count) # Make sure the exception was raised self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageBusy]) @common_mocks def test_delete_volume_not_found(self): self.mock_rbd.Image.return_value.list_snaps.return_value = [] self.mock_rbd.RBD.return_value.remove.side_effect = ( self.mock_rbd.ImageNotFound) with mock.patch.object(self.driver, '_get_clone_info') as \ mock_get_clone_info: mock_get_clone_info.return_value = (None, None, None) with mock.patch.object(self.driver, '_delete_backup_snaps') as \ 
mock_delete_backup_snaps: with mock.patch.object(driver, 'RADOSClient') as \ mock_rados_client: self.assertIsNone(self.driver.delete_volume(self.volume)) mock_get_clone_info.assert_called_once_with( self.mock_rbd.Image.return_value, self.volume_name, None) (self.mock_rbd.Image.return_value.list_snaps .assert_called_once_with()) mock_rados_client.assert_called_once_with(self.driver) mock_delete_backup_snaps.assert_called_once_with( self.mock_rbd.Image.return_value) self.assertFalse( self.mock_rbd.Image.return_value.unprotect_snap.called) self.assertEqual( 1, self.mock_rbd.RBD.return_value.remove.call_count) # Make sure the exception was raised self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageNotFound]) @common_mocks def test_create_snapshot(self): proxy = self.mock_proxy.return_value proxy.__enter__.return_value = proxy self.driver.create_snapshot(self.snapshot) args = [str(self.snapshot_name)] proxy.create_snap.assert_called_with(*args) proxy.protect_snap.assert_called_with(*args) @common_mocks def test_delete_snapshot(self): proxy = self.mock_proxy.return_value proxy.__enter__.return_value = proxy self.driver.delete_snapshot(self.snapshot) args = [str(self.snapshot_name)] proxy.remove_snap.assert_called_with(*args) proxy.unprotect_snap.assert_called_with(*args) @common_mocks def test_get_clone_info(self): volume = self.mock_rbd.Image() volume.set_snap = mock.Mock() volume.parent_info = mock.Mock() parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name)) volume.parent_info.return_value = parent_info info = self.driver._get_clone_info(volume, self.volume_name) self.assertEqual(info, parent_info) self.assertFalse(volume.set_snap.called) volume.parent_info.assert_called_once_with() @common_mocks def test_get_clone_info_w_snap(self): volume = self.mock_rbd.Image() volume.set_snap = mock.Mock() volume.parent_info = mock.Mock() parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name)) volume.parent_info.return_value = parent_info snapshot = 
self.mock_rbd.ImageSnapshot() info = self.driver._get_clone_info(volume, self.volume_name, snap=snapshot) self.assertEqual(info, parent_info) self.assertEqual(volume.set_snap.call_count, 2) volume.parent_info.assert_called_once_with() @common_mocks def test_get_clone_info_w_exception(self): volume = self.mock_rbd.Image() volume.set_snap = mock.Mock() volume.parent_info = mock.Mock() volume.parent_info.side_effect = self.mock_rbd.ImageNotFound snapshot = self.mock_rbd.ImageSnapshot() info = self.driver._get_clone_info(volume, self.volume_name, snap=snapshot) self.assertEqual(info, (None, None, None)) self.assertEqual(volume.set_snap.call_count, 2) volume.parent_info.assert_called_once_with() # Make sure the exception was raised self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageNotFound]) @common_mocks def test_get_clone_info_deleted_volume(self): volume = self.mock_rbd.Image() volume.set_snap = mock.Mock() volume.parent_info = mock.Mock() parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name)) volume.parent_info.return_value = parent_info info = self.driver._get_clone_info(volume, "%s.deleted" % (self.volume_name)) self.assertEqual(info, parent_info) self.assertFalse(volume.set_snap.called) volume.parent_info.assert_called_once_with() @common_mocks def test_create_cloned_volume_same_size(self): src_name = u'volume-00000001' dst_name = u'volume-00000002' self.cfg.rbd_max_clone_depth = 2 with mock.patch.object(self.driver, '_get_clone_depth') as \ mock_get_clone_depth: # Try with no flatten required with mock.patch.object(self.driver, '_resize') as mock_resize: mock_get_clone_depth.return_value = 1 self.driver.create_cloned_volume({'name': dst_name, 'size': 10}, {'name': src_name, 'size': 10}) (self.mock_rbd.Image.return_value.create_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.protect_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.assertEqual( 1, 
self.mock_rbd.RBD.return_value.clone.call_count) self.mock_rbd.Image.return_value.close \ .assert_called_once_with() self.assertTrue(mock_get_clone_depth.called) self.assertEqual( 0, mock_resize.call_count) @common_mocks def test_create_cloned_volume_different_size(self): src_name = u'volume-00000001' dst_name = u'volume-00000002' self.cfg.rbd_max_clone_depth = 2 with mock.patch.object(self.driver, '_get_clone_depth') as \ mock_get_clone_depth: # Try with no flatten required with mock.patch.object(self.driver, '_resize') as mock_resize: mock_get_clone_depth.return_value = 1 self.driver.create_cloned_volume({'name': dst_name, 'size': 20}, {'name': src_name, 'size': 10}) (self.mock_rbd.Image.return_value.create_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.protect_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.assertEqual( 1, self.mock_rbd.RBD.return_value.clone.call_count) self.mock_rbd.Image.return_value.close \ .assert_called_once_with() self.assertTrue(mock_get_clone_depth.called) self.assertEqual( 1, mock_resize.call_count) @common_mocks def test_create_cloned_volume_w_flatten(self): src_name = u'volume-00000001' dst_name = u'volume-00000002' self.cfg.rbd_max_clone_depth = 1 self.mock_rbd.RBD.return_value.clone.side_effect = ( self.mock_rbd.RBD.Error) with mock.patch.object(self.driver, '_get_clone_depth') as \ mock_get_clone_depth: # Try with no flatten required mock_get_clone_depth.return_value = 1 self.assertRaises(self.mock_rbd.RBD.Error, self.driver.create_cloned_volume, dict(name=dst_name), dict(name=src_name)) (self.mock_rbd.Image.return_value.create_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.protect_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.assertEqual( 1, self.mock_rbd.RBD.return_value.clone.call_count) (self.mock_rbd.Image.return_value.unprotect_snap 
.assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.remove_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.mock_rbd.Image.return_value.close.assert_called_once_with() self.assertTrue(mock_get_clone_depth.called) @common_mocks def test_create_cloned_volume_w_clone_exception(self): src_name = u'volume-00000001' dst_name = u'volume-00000002' self.cfg.rbd_max_clone_depth = 2 self.mock_rbd.RBD.return_value.clone.side_effect = ( self.mock_rbd.RBD.Error) with mock.patch.object(self.driver, '_get_clone_depth') as \ mock_get_clone_depth: # Try with no flatten required mock_get_clone_depth.return_value = 1 self.assertRaises(self.mock_rbd.RBD.Error, self.driver.create_cloned_volume, {'name': dst_name}, {'name': src_name}) (self.mock_rbd.Image.return_value.create_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.protect_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.assertEqual( 1, self.mock_rbd.RBD.return_value.clone.call_count) (self.mock_rbd.Image.return_value.unprotect_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) (self.mock_rbd.Image.return_value.remove_snap .assert_called_once_with('.'.join((dst_name, 'clone_snap')))) self.mock_rbd.Image.return_value.close.assert_called_once_with() @common_mocks def test_good_locations(self): locations = ['rbd://fsid/pool/image/snap', 'rbd://%2F/%2F/%2F/%2F', ] map(self.driver._parse_location, locations) @common_mocks def test_bad_locations(self): locations = ['rbd://image', 'http://path/to/somewhere/else', 'rbd://image/extra', 'rbd://image/', 'rbd://fsid/pool/image/', 'rbd://fsid/pool/image/snap/', 'rbd://///', ] for loc in locations: self.assertRaises(exception.ImageUnacceptable, self.driver._parse_location, loc) self.assertFalse( self.driver._is_cloneable(loc, {'disk_format': 'raw'})) @common_mocks def test_cloneable(self): with mock.patch.object(self.driver, '_get_fsid') 
as mock_get_fsid: mock_get_fsid.return_value = 'abc' location = 'rbd://abc/pool/image/snap' info = {'disk_format': 'raw'} self.assertTrue(self.driver._is_cloneable(location, info)) self.assertTrue(mock_get_fsid.called) @common_mocks def test_uncloneable_different_fsid(self): with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid: mock_get_fsid.return_value = 'abc' location = 'rbd://def/pool/image/snap' self.assertFalse( self.driver._is_cloneable(location, {'disk_format': 'raw'})) self.assertTrue(mock_get_fsid.called) @common_mocks def test_uncloneable_unreadable(self): with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid: mock_get_fsid.return_value = 'abc' location = 'rbd://abc/pool/image/snap' self.driver.rbd.Error = Exception self.mock_proxy.side_effect = Exception args = [location, {'disk_format': 'raw'}] self.assertFalse(self.driver._is_cloneable(*args)) self.assertEqual(1, self.mock_proxy.call_count) self.assertTrue(mock_get_fsid.called) @common_mocks def test_uncloneable_bad_format(self): with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid: mock_get_fsid.return_value = 'abc' location = 'rbd://abc/pool/image/snap' formats = ['qcow2', 'vmdk', 'vdi'] for f in formats: self.assertFalse( self.driver._is_cloneable(location, {'disk_format': f})) self.assertTrue(mock_get_fsid.called) def _copy_image(self): with mock.patch.object(tempfile, 'NamedTemporaryFile'): with mock.patch.object(os.path, 'exists') as mock_exists: mock_exists.return_value = True with mock.patch.object(image_utils, 'fetch_to_raw'): with mock.patch.object(self.driver, 'delete_volume'): with mock.patch.object(self.driver, '_resize'): mock_image_service = mock.MagicMock() args = [None, {'name': 'test', 'size': 1}, mock_image_service, None] self.driver.copy_image_to_volume(*args) @common_mocks def test_copy_image_no_volume_tmp(self): self.cfg.volume_tmp_dir = None self.cfg.image_conversion_dir = None self._copy_image() @common_mocks def 
test_copy_image_volume_tmp(self): self.cfg.volume_tmp_dir = None self.cfg.image_conversion_dir = '/var/run/cinder/tmp' self._copy_image() @common_mocks def test_update_volume_stats(self): client = self.mock_client.return_value client.__enter__.return_value = client client.cluster = mock.Mock() client.cluster.mon_command = mock.Mock() client.cluster.mon_command.return_value = ( 0, '{"stats":{"total_bytes":64385286144,' '"total_used_bytes":3289628672,"total_avail_bytes":61095657472},' '"pools":[{"name":"rbd","id":2,"stats":{"kb_used":1510197,' '"bytes_used":1546440971,"max_avail":28987613184,"objects":412}},' '{"name":"volumes","id":3,"stats":{"kb_used":0,"bytes_used":0,' '"max_avail":28987613184,"objects":0}}]}\n', '') self.driver.configuration.safe_get = mock.Mock() self.driver.configuration.safe_get.return_value = 'RBD' expected = dict( volume_backend_name='RBD', vendor_name='Open Source', driver_version=self.driver.VERSION, storage_protocol='ceph', total_capacity_gb=27, free_capacity_gb=26, reserved_percentage=0) actual = self.driver.get_volume_stats(True) client.cluster.mon_command.assert_called_once_with( '{"prefix":"df", "format":"json"}', '') self.assertDictMatch(expected, actual) @common_mocks def test_update_volume_stats_error(self): client = self.mock_client.return_value client.__enter__.return_value = client client.cluster = mock.Mock() client.cluster.mon_command = mock.Mock() client.cluster.mon_command.return_value = (22, '', '') self.driver.configuration.safe_get = mock.Mock() self.driver.configuration.safe_get.return_value = 'RBD' expected = dict(volume_backend_name='RBD', vendor_name='Open Source', driver_version=self.driver.VERSION, storage_protocol='ceph', total_capacity_gb='unknown', free_capacity_gb='unknown', reserved_percentage=0) actual = self.driver.get_volume_stats(True) client.cluster.mon_command.assert_called_once_with( '{"prefix":"df", "format":"json"}', '') self.assertDictMatch(expected, actual) @common_mocks def test_get_mon_addrs(self): 
with mock.patch.object(self.driver, '_execute') as mock_execute: mock_execute.return_value = (CEPH_MON_DUMP, '') hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com'] ports = ['6789', '6790', '6791', '6792', '6791'] self.assertEqual((hosts, ports), self.driver._get_mon_addrs()) @common_mocks def test_initialize_connection(self): hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com'] ports = ['6789', '6790', '6791', '6792', '6791'] with mock.patch.object(self.driver, '_get_mon_addrs') as \ mock_get_mon_addrs: mock_get_mon_addrs.return_value = (hosts, ports) expected = { 'driver_volume_type': 'rbd', 'data': { 'name': '%s/%s' % (self.cfg.rbd_pool, self.volume_name), 'hosts': hosts, 'ports': ports, 'auth_enabled': False, 'auth_username': None, 'secret_type': 'ceph', 'secret_uuid': None, } } volume = dict(name=self.volume_name) actual = self.driver.initialize_connection(volume, None) self.assertDictMatch(expected, actual) self.assertTrue(mock_get_mon_addrs.called) @common_mocks def test_clone(self): src_pool = u'images' src_image = u'image-name' src_snap = u'snapshot-name' client_stack = [] def mock__enter__(inst): def _inner(): client_stack.append(inst) return inst return _inner client = self.mock_client.return_value # capture both rados client used to perform the clone client.__enter__.side_effect = mock__enter__(client) self.driver._clone(self.volume, src_pool, src_image, src_snap) args = [client_stack[0].ioctx, str(src_image), str(src_snap), client_stack[1].ioctx, str(self.volume_name)] kwargs = {'features': client.features} self.mock_rbd.RBD.return_value.clone.assert_called_once_with( *args, **kwargs) self.assertEqual(client.__enter__.call_count, 2) @common_mocks def test_extend_volume(self): fake_size = '20' fake_vol = {'project_id': 'testprjid', 'name': self.volume_name, 'size': fake_size, 'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66'} self.mox.StubOutWithMock(self.driver, '_resize') size = int(fake_size) * units.Gi self.driver._resize(fake_vol, size=size) 
self.mox.ReplayAll() self.driver.extend_volume(fake_vol, fake_size) self.mox.VerifyAll() @common_mocks def test_retype(self): context = {} diff = {'encryption': {}, 'extra_specs': {}} fake_volume = {'name': 'testvolume', 'host': 'currenthost'} fake_type = 'high-IOPS' # no support for migration host = {'host': 'anotherhost'} self.assertFalse(self.driver.retype(context, fake_volume, fake_type, diff, host)) host = {'host': 'currenthost'} # no support for changing encryption diff['encryption'] = {'non-empty': 'non-empty'} self.assertFalse(self.driver.retype(context, fake_volume, fake_type, diff, host)) diff['encryption'] = {} # no support for changing extra_specs diff['extra_specs'] = {'non-empty': 'non-empty'} self.assertFalse(self.driver.retype(context, fake_volume, fake_type, diff, host)) diff['extra_specs'] = {} self.assertTrue(self.driver.retype(context, fake_volume, fake_type, diff, host)) def test_rbd_volume_proxy_init(self): mock_driver = mock.Mock(name='driver') mock_driver._connect_to_rados.return_value = (None, None) with driver.RBDVolumeProxy(mock_driver, self.volume_name): self.assertEqual(1, mock_driver._connect_to_rados.call_count) self.assertFalse(mock_driver._disconnect_from_rados.called) self.assertEqual(1, mock_driver._disconnect_from_rados.call_count) mock_driver.reset_mock() snap = u'snapshot-name' with driver.RBDVolumeProxy(mock_driver, self.volume_name, snapshot=snap): self.assertEqual(1, mock_driver._connect_to_rados.call_count) self.assertFalse(mock_driver._disconnect_from_rados.called) self.assertEqual(1, mock_driver._disconnect_from_rados.call_count) @common_mocks def test_connect_to_rados(self): # Default self.cfg.rados_connect_timeout = -1 self.mock_rados.Rados.return_value.open_ioctx.return_value = \ self.mock_rados.Rados.return_value.ioctx # default configured pool ret = self.driver._connect_to_rados() self.assertTrue(self.mock_rados.Rados.return_value.connect.called) # Expect no timeout if default is used 
self.mock_rados.Rados.return_value.connect.assert_called_once_with() self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called) self.assertEqual(ret[1], self.mock_rados.Rados.return_value.ioctx) self.mock_rados.Rados.return_value.open_ioctx.assert_called_with( self.cfg.rbd_pool) # different pool ret = self.driver._connect_to_rados('alt_pool') self.assertTrue(self.mock_rados.Rados.return_value.connect.called) self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called) self.assertEqual(ret[1], self.mock_rados.Rados.return_value.ioctx) self.mock_rados.Rados.return_value.open_ioctx.assert_called_with( 'alt_pool') # With timeout self.cfg.rados_connect_timeout = 1 self.mock_rados.Rados.return_value.connect.reset_mock() self.driver._connect_to_rados() self.mock_rados.Rados.return_value.connect.assert_called_once_with( timeout=1) # error self.mock_rados.Rados.return_value.open_ioctx.reset_mock() self.mock_rados.Rados.return_value.shutdown.reset_mock() self.mock_rados.Rados.return_value.open_ioctx.side_effect = ( self.mock_rados.Error) self.assertRaises(exception.VolumeBackendAPIException, self.driver._connect_to_rados) self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called) self.mock_rados.Rados.return_value.shutdown.assert_called_once_with() class RBDImageIOWrapperTestCase(test.TestCase): def setUp(self): super(RBDImageIOWrapperTestCase, self).setUp() self.meta = mock.Mock() self.meta.user = 'mock_user' self.meta.conf = 'mock_conf' self.meta.pool = 'mock_pool' self.meta.image = mock.Mock() self.meta.image.read = mock.Mock() self.meta.image.write = mock.Mock() self.meta.image.size = mock.Mock() self.mock_rbd_wrapper = driver.RBDImageIOWrapper(self.meta) self.data_length = 1024 self.full_data = 'abcd' * 256 def test_init(self): self.assertEqual(self.mock_rbd_wrapper._rbd_meta, self.meta) self.assertEqual(self.mock_rbd_wrapper._offset, 0) def test_inc_offset(self): self.mock_rbd_wrapper._inc_offset(10) self.mock_rbd_wrapper._inc_offset(10) 
self.assertEqual(self.mock_rbd_wrapper._offset, 20) def test_rbd_image(self): self.assertEqual(self.mock_rbd_wrapper.rbd_image, self.meta.image) def test_rbd_user(self): self.assertEqual(self.mock_rbd_wrapper.rbd_user, self.meta.user) def test_rbd_pool(self): self.assertEqual(self.mock_rbd_wrapper.rbd_conf, self.meta.conf) def test_rbd_conf(self): self.assertEqual(self.mock_rbd_wrapper.rbd_pool, self.meta.pool) def test_read(self): def mock_read(offset, length): return self.full_data[offset:length] self.meta.image.read.side_effect = mock_read self.meta.image.size.return_value = self.data_length data = self.mock_rbd_wrapper.read() self.assertEqual(data, self.full_data) data = self.mock_rbd_wrapper.read() self.assertEqual(data, '') self.mock_rbd_wrapper.seek(0) data = self.mock_rbd_wrapper.read() self.assertEqual(data, self.full_data) self.mock_rbd_wrapper.seek(0) data = self.mock_rbd_wrapper.read(10) self.assertEqual(data, self.full_data[:10]) def test_write(self): self.mock_rbd_wrapper.write(self.full_data) self.assertEqual(self.mock_rbd_wrapper._offset, 1024) def test_seekable(self): self.assertTrue(self.mock_rbd_wrapper.seekable) def test_seek(self): self.assertEqual(self.mock_rbd_wrapper._offset, 0) self.mock_rbd_wrapper.seek(10) self.assertEqual(self.mock_rbd_wrapper._offset, 10) self.mock_rbd_wrapper.seek(10) self.assertEqual(self.mock_rbd_wrapper._offset, 10) self.mock_rbd_wrapper.seek(10, 1) self.assertEqual(self.mock_rbd_wrapper._offset, 20) self.mock_rbd_wrapper.seek(0) self.mock_rbd_wrapper.write(self.full_data) self.meta.image.size.return_value = self.data_length self.mock_rbd_wrapper.seek(0) self.assertEqual(self.mock_rbd_wrapper._offset, 0) self.mock_rbd_wrapper.seek(10, 2) self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length + 10) self.mock_rbd_wrapper.seek(-10, 2) self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length - 10) # test exceptions. 
self.assertRaises(IOError, self.mock_rbd_wrapper.seek, 0, 3) self.assertRaises(IOError, self.mock_rbd_wrapper.seek, -1) # offset should not have been changed by any of the previous # operations. self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length - 10) def test_tell(self): self.assertEqual(self.mock_rbd_wrapper.tell(), 0) self.mock_rbd_wrapper._inc_offset(10) self.assertEqual(self.mock_rbd_wrapper.tell(), 10) def test_flush(self): with mock.patch.object(driver, 'LOG') as mock_logger: self.meta.image.flush = mock.Mock() self.mock_rbd_wrapper.flush() self.meta.image.flush.assert_called_once_with() self.meta.image.flush.reset_mock() # this should be caught and logged silently. self.meta.image.flush.side_effect = AttributeError self.mock_rbd_wrapper.flush() self.meta.image.flush.assert_called_once_with() msg = _("flush() not supported in this version of librbd") mock_logger.warning.assert_called_with(msg) def test_fileno(self): self.assertRaises(IOError, self.mock_rbd_wrapper.fileno) def test_close(self): self.mock_rbd_wrapper.close() class ManagedRBDTestCase(test_volume.DriverTestCase): driver_name = "cinder.volume.drivers.rbd.RBDDriver" def setUp(self): super(ManagedRBDTestCase, self).setUp() # TODO(dosaboy): need to remove dependency on mox stubs here once # image.fake has been converted to mock. fake_image.stub_out_image_service(self.stubs) self.volume.driver.set_initialized() self.volume.stats = {'allocated_capacity_gb': 0, 'pools': {}} self.called = [] def _create_volume_from_image(self, expected_status, raw=False, clone_error=False): """Try to clone a volume from an image, and check the status afterwards. NOTE: if clone_error is True we force the image type to raw otherwise clone_image is not called """ volume_id = 1 # See tests.image.fake for image types. 
if raw: image_id = '155d900f-4e14-4e4c-a73d-069cbf4541e6' else: image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77' # creating volume testdata db.volume_create(self.context, {'id': volume_id, 'updated_at': timeutils.utcnow(), 'display_description': 'Test Desc', 'size': 20, 'status': 'creating', 'instance_uuid': None, 'host': 'dummy'}) try: if not clone_error: self.volume.create_volume(self.context, volume_id, image_id=image_id) else: self.assertRaises(exception.CinderException, self.volume.create_volume, self.context, volume_id, image_id=image_id) volume = db.volume_get(self.context, volume_id) self.assertEqual(volume['status'], expected_status) finally: # cleanup db.volume_destroy(self.context, volume_id) def test_create_vol_from_image_status_available(self): """Clone raw image then verify volume is in available state.""" def _mock_clone_image(context, volume, image_location, image_meta, image_service): return {'provider_location': None}, True with mock.patch.object(self.volume.driver, 'clone_image') as \ mock_clone_image: mock_clone_image.side_effect = _mock_clone_image with mock.patch.object(self.volume.driver, 'create_volume') as \ mock_create: with mock.patch.object(create_volume.CreateVolumeFromSpecTask, '_copy_image_to_volume') as mock_copy: self._create_volume_from_image('available', raw=True) self.assertFalse(mock_copy.called) self.assertTrue(mock_clone_image.called) self.assertFalse(mock_create.called) def test_create_vol_from_non_raw_image_status_available(self): """Clone non-raw image then verify volume is in available state.""" def _mock_clone_image(context, volume, image_location, image_meta, image_service): return {'provider_location': None}, False with mock.patch.object(self.volume.driver, 'clone_image') as \ mock_clone_image: mock_clone_image.side_effect = _mock_clone_image with mock.patch.object(self.volume.driver, 'create_volume') as \ mock_create: with mock.patch.object(create_volume.CreateVolumeFromSpecTask, '_copy_image_to_volume') as mock_copy: 
self._create_volume_from_image('available', raw=False) self.assertTrue(mock_copy.called) self.assertTrue(mock_clone_image.called) self.assertTrue(mock_create.called) def test_create_vol_from_image_status_error(self): """Fail to clone raw image then verify volume is in error state.""" with mock.patch.object(self.volume.driver, 'clone_image') as \ mock_clone_image: mock_clone_image.side_effect = exception.CinderException with mock.patch.object(self.volume.driver, 'create_volume'): with mock.patch.object(create_volume.CreateVolumeFromSpecTask, '_copy_image_to_volume') as mock_copy: self._create_volume_from_image('error', raw=True, clone_error=True) self.assertFalse(mock_copy.called) self.assertTrue(mock_clone_image.called) self.assertFalse(self.volume.driver.create_volume.called) def test_clone_failure(self): driver = self.volume.driver with mock.patch.object(driver, '_is_cloneable', lambda *args: False): image_loc = (mock.Mock(), None) actual = driver.clone_image(mock.Mock(), mock.Mock(), image_loc, {}, mock.Mock()) self.assertEqual(({}, False), actual) self.assertEqual(({}, False), driver.clone_image('', object(), None, {}, '')) def test_clone_success(self): expected = ({'provider_location': None}, True) driver = self.volume.driver with mock.patch.object(self.volume.driver, '_is_cloneable') as \ mock_is_cloneable: mock_is_cloneable.return_value = True with mock.patch.object(self.volume.driver, '_clone') as \ mock_clone: with mock.patch.object(self.volume.driver, '_resize') as \ mock_resize: image_loc = ('rbd://fee/fi/fo/fum', None) volume = {'name': 'vol1'} actual = driver.clone_image(mock.Mock(), volume, image_loc, {'disk_format': 'raw', 'id': 'id.foo'}, mock.Mock()) self.assertEqual(expected, actual) mock_clone.assert_called_once_with(volume, 'fi', 'fo', 'fum') mock_resize.assert_called_once_with(volume) def test_clone_multilocation_success(self): expected = ({'provider_location': None}, True) driver = self.volume.driver def cloneable_side_effect(url_location, 
image_meta): return url_location == 'rbd://fee/fi/fo/fum' with mock.patch.object(self.volume.driver, '_is_cloneable') \ as mock_is_cloneable, \ mock.patch.object(self.volume.driver, '_clone') as mock_clone, \ mock.patch.object(self.volume.driver, '_resize') \ as mock_resize: mock_is_cloneable.side_effect = cloneable_side_effect image_loc = ('rbd://bee/bi/bo/bum', [{'url': 'rbd://bee/bi/bo/bum'}, {'url': 'rbd://fee/fi/fo/fum'}]) volume = {'name': 'vol1'} image_meta = mock.sentinel.image_meta image_service = mock.sentinel.image_service actual = driver.clone_image(self.context, volume, image_loc, image_meta, image_service) self.assertEqual(expected, actual) self.assertEqual(2, mock_is_cloneable.call_count) mock_clone.assert_called_once_with(volume, 'fi', 'fo', 'fum') mock_is_cloneable.assert_called_with('rbd://fee/fi/fo/fum', image_meta) mock_resize.assert_called_once_with(volume) def test_clone_multilocation_failure(self): expected = ({}, False) driver = self.volume.driver with mock.patch.object(driver, '_is_cloneable', return_value=False) \ as mock_is_cloneable, \ mock.patch.object(self.volume.driver, '_clone') as mock_clone, \ mock.patch.object(self.volume.driver, '_resize') \ as mock_resize: image_loc = ('rbd://bee/bi/bo/bum', [{'url': 'rbd://bee/bi/bo/bum'}, {'url': 'rbd://fee/fi/fo/fum'}]) volume = {'name': 'vol1'} image_meta = mock.sentinel.image_meta image_service = mock.sentinel.image_service actual = driver.clone_image(self.context, volume, image_loc, image_meta, image_service) self.assertEqual(expected, actual) self.assertEqual(2, mock_is_cloneable.call_count) mock_is_cloneable.assert_any_call('rbd://bee/bi/bo/bum', image_meta) mock_is_cloneable.assert_any_call('rbd://fee/fi/fo/fum', image_meta) self.assertFalse(mock_clone.called) self.assertFalse(mock_resize.called)
apache-2.0
-407,074,156,635,504,500
40.270936
79
0.555721
false
3.913429
true
false
false
andMYhacks/infosec_mentors_project
app/config.py
1
1971
# project/config.py import os # from dotenv import load_dotenv, find_dotenv basedir = os.path.abspath(os.path.dirname(__file__)) # load_dotenv(find_dotenv()) class BaseConfig: # Base configuration SECRET_KEY = os.environ.get('APP_SECRET_KEY') PASSWORD_SALT = os.environ.get('APP_PASSWORD_SALT') DEBUG = False BCRYPT_LOG_ROUNDS = 13 WTF_CSRF_ENABLED = True DEBUG_TB_ENABLED = False DEBUG_TB_INTERCEPT_REDIRECTS = False # TODO: Switch Preferred URL Scheme # PREFERRED_URL_SCHEME = 'https' PREFERRED_URL_SCHEME = 'http' # mail settings MAIL_SERVER = 'smtp.gmail.com' MAIL_PORT = 587 MAIL_USE_TLS = True MAIL_USE_SSL = False # mail credentials MAIL_USERNAME = os.environ.get('APP_MAIL_USERNAME') MAIL_PASSWORD = os.environ.get('APP_MAIL_PASSWORD') # mail account(s) MAIL_DEFAULT_SENDER = os.environ.get('APP_MAIL_SENDER') # redis server CELERY_BROKER_URL = 'redis://localhost:6379/0' CELERY_RESULT_BACKEND = 'redis://localhost:6379/0' @staticmethod def init_app(app): pass class DevConfig(BaseConfig): # Development configuration DEBUG = True WTF_CSRF_ENABLED = False SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'database.sqlite') DEBUG_TB_ENABLED = True class ProdConfig(BaseConfig): # Production configuration DEBUG = False SESSION_COOKIE_SECURE = True SECRET_KEY = os.environ.get('APP_SECRET_KEY') DB_NAME = os.environ.get('APP_DB_NAME') DB_USER = os.environ.get('APP_DB_USER') DB_PASSWORD = os.environ.get('APP_DB_PASSWORD') SQLALCHEMY_DATABASE_URI = 'postgresql://' + DB_USER + ':' + DB_PASSWORD + '@localhost/' + DB_NAME DEBUG_TB_ENABLED = False STRIPE_SECRET_KEY = os.environ.get('APP_STRIPE_SECRET_KEY') STRIPE_PUBLISHABLE_KEY = os.environ.get('APP_PUBLISHABLE_KEY') config_type = { 'dev': DevConfig, 'prod': ProdConfig, 'defalt': DevConfig }
gpl-3.0
7,683,234,075,352,378,000
26
101
0.660071
false
3.113744
true
false
false
aferrugento/SemLDA
wsd.py
1
15115
from pywsd.lesk import adapted_lesk from nltk.corpus import wordnet as wn import pickle import time import sys def main(file_name): start = time.time() #string = '/home/adriana/Dropbox/mine/Tese/preprocessing/data_output/' #string = '/home/aferrugento/Desktop/' string = '' h = open(string + file_name + '_proc.txt') sentences = h.read() h.close() extra_synsets = {} sentences = sentences.split("\n") for i in range(len(sentences)): sentences[i] = sentences[i].split(" ") for j in range(len(sentences[i])): if sentences[i][j] == '': continue sentences[i][j] = sentences[i][j].split("_")[0] for i in range(len(sentences)): aux = '' for j in range(len(sentences[i])): aux += sentences[i][j] + ' ' sentences[i] = aux word_count = pickle.load(open('word_count_new.p')) synset_count = pickle.load(open('synset_count.p')) word_count_corpus = calculate_word_frequency(sentences) sum_word_corpus = 0 for key in word_count_corpus.keys(): sum_word_corpus += word_count_corpus.get(key) sum_word = 0 for key in word_count.keys(): sum_word += word_count.get(key) sum_synset = 0 for key in synset_count.keys(): sum_synset += synset_count.get(key) word_list = [] for key in word_count.keys(): word_list.append(word_count.get(key)) synset_list = [] for key in synset_count.keys(): synset_list.append(synset_count.get(key)) word_list.sort() synset_list.sort() #print len(word_list), len(synset_list) #print len(word_list)/2., len(synset_list)/2., (len(word_list)/2.) -1, (len(synset_list)/2.) 
-1 #print word_list[len(word_list)/2], word_list[(len(word_list)/2)-1] #print synset_list[len(synset_list)/2], synset_list[(len(synset_list)/2)-1] word_median = round(2./sum_word, 5) synset_median = round(2./sum_synset, 5) #print word_median, synset_median #print sum_word, sum_synset #return #f = open(string + 'preprocess_semLDA_EPIA/NEWS2_snowballstopword_wordnetlemma_pos_freq.txt') f = open(string + file_name +'_freq.txt') m = f.read() f.close() m = m.split("\n") for i in range(len(m)): m[i] = m[i].split(" ") count = 0 imag = -1 #f = open(string + 'preprocess_semLDA_EPIA/znew_eta_NEWS2.txt') f = open(string + file_name + '_eta.txt') g = f.read() f.close() g = g.split("\n") for i in range(len(g)): g[i] = g[i].split(" ") dic_g = create_dicio(g) g = open(string + file_name +'_wsd.txt','w') #dictio = pickle.load(open(string + 'preprocess_semLDA_EPIA/NEWS2_snowballstopword_wordnetlemma_pos_vocab.p')) dictio = pickle.load(open(string + file_name +'_vocab.p')) nn = open(string + file_name +'_synsetVoc.txt','w') synsets = {} to_write = [] p = open(string + 'NEWS2_wsd.log','w') for i in range(len(m)): nana = str(m[i][0]) + ' ' print 'Doc ' + str(i) p.write('---------- DOC ' +str(i) + ' ----------\n') #words_probs = bayes_theorem(sentences[i], dictio, word_count, sum_word, word_median) #return #g.write(str(m[i][0]) + ' ') for k in range(1, len(m[i])): #print sentences[i] if m[i][k] == '': continue #print dictio.get(int(m[i][k].split(":")[0])) + str(m[i][k].split(":")[0]) #print wn.synsets(dictio.get(int(m[i][k].split(":")[0])).split("_")[0], penn_to_wn(dictio.get(int(m[i][k].split(":")[0])).split("_")[1])) #caso nao existam synsets para aquela palavra if len(wn.synsets(dictio.get(int(m[i][k].split(":")[0])).split("_")[0], penn_to_wn(dictio.get(int(m[i][k].split(":")[0])).split("_")[1]))) == 0: nana += m[i][k]+":1[" +str(count)+":"+str(1)+"] " synsets[imag] = count extra_synsets[imag] = dictio.get(int(m[i][k].split(":")[0])) #g.write(m[i][k]+":1[" +str(imag)+":"+str(1)+"] 
") imag -= 1 count += 1 continue sent = sentences[i] ambiguous = dictio.get(int(m[i][k].split(":")[0])).split("_")[0] post = dictio.get(int(m[i][k].split(":")[0])).split("_")[1] try: answer = adapted_lesk(sent, ambiguous, pos= penn_to_wn(post), nbest=True) except Exception, e: #caso o lesk se arme em estupido s = wn.synsets(dictio.get(int(m[i][k].split(":")[0])).split("_")[0], penn_to_wn(dictio.get(int(m[i][k].split(":")[0])).split("_")[1])) if len(s) != 0: count2 = 0 #ver quantos synsets existem no semcor #for n in range(len(s)): # if dic_g.has_key(str(s[n].offset)): # words = dic_g.get(str(s[n].offset)) # for j in range(len(words)): # if words[j].split(":")[0] == m[i][k].split(":")[0]: # count2 += 1 # se nao existir nenhum criar synset imaginario #if count2 == 0: # nana += m[i][k]+":1[" +str(count)+":"+str(1)+"] " # synsets[imag] = count # extra_synsets[imag] = dictio.get(int(m[i][k].split(":")[0])) #g.write(m[i][k]+":1[" +str(imag)+":"+str(1)+"] ") # count += 1 # imag -= 1 # continue #caso existam ir buscar as suas probabilidades ao semcor nana += m[i][k] +':'+ str(len(s)) + '[' c = 1 prob = 1.0/len(s) for n in range(len(s)): #print answer[n][1].offset #print 'Coco ' + str(s[n].offset) #if dic_g.has_key(str(s[n].offset)): #words = dic_g.get(str(s[n].offset)) #for j in range(len(words)): # if words[j].split(":")[0] == m[i][k].split(":")[0]: # aux = 0 a = (s[n].offset()) #print s[n].offset() if synsets.has_key(a): aux = synsets.get(a) else: synsets[a] = count aux = count count += 1 if n == len(s) - 1: nana += str(aux) + ':' + str(prob) + '] ' else: nana += str(aux) + ':' + str(prob) + ' ' else: nana += m[i][k]+":1[" +str(count)+":"+str(1)+"] " synsets[imag] = count extra_synsets[imag] = dictio.get(int(m[i][k].split(":")[0])) #g.write(m[i][k]+":1[" +str(imag)+":"+str(1)+"] ") count += 1 imag -= 1 continue #g.write(m[i][k] +':'+ str(len(answer)) + '[') total = 0 for j in range(len(answer)): total += answer[j][0] #caso lesk nao devolva nenhuma resposta criar synset 
imaginario if len(answer) == 0: nana += m[i][k]+":1[" +str(count)+":"+str(1)+"] " synsets[imag] = count extra_synsets[imag] = dictio.get(int(m[i][k].split(":")[0])) #g.write(m[i][k]+":1[" +str(imag)+":"+str(1)+"] ") count += 1 imag -= 1 continue #print ambiguous #print total #print answer #caso nenhum dos synsets tenha overlap ir ver ao semcor as suas probabilidades if total == 0: #print 'ZERO' count2 = 0 #for n in range(len(answer)): # if dic_g.has_key(str(answer[n][1].offset)): # words = dic_g.get(str(answer[n][1].offset)) # for j in range(len(words)): # if words[j].split(":")[0] == m[i][k].split(":")[0]: # count2 += 1 #if count2 == 0: # nana += m[i][k]+":1[" +str(count)+":"+str(1)+"] " # synsets[imag] = count # extra_synsets[imag] = dictio.get(int(m[i][k].split(":")[0])) #g.write(m[i][k]+":1[" +str(imag)+":"+str(1)+"] ") # count += 1 # imag -= 1 # continue s = wn.synsets(dictio.get(int(m[i][k].split(":")[0])).split("_")[0], penn_to_wn(dictio.get(int(m[i][k].split(":")[0])).split("_")[1])) nana += m[i][k] +':'+ str(len(s)) + '[' c = 1 prob = 1.0/len(s) for n in range(len(s)): #print answer[n][1].offset #print 'Coco ' + str(s[n].offset) #if dic_g.has_key(str(s[n].offset)): #words = dic_g.get(str(s[n].offset)) #for j in range(len(words)): # if words[j].split(":")[0] == m[i][k].split(":")[0]: # aux = 0 a = (s[n].offset()) #print s[n].offset() if synsets.has_key(a): aux = synsets.get(a) else: synsets[a] = count aux = count count += 1 if n == len(s) - 1: nana += str(aux) + ':' + str(prob) + '] ' else: nana += str(aux) + ':' + str(prob) + ' ' #print nana continue #contar quantos synsets e que nao estao a zero count2 = 0 for j in range(len(answer)): if answer[j][0] == 0: continue else: count2 += 1 c = 1 nana += m[i][k] +':'+ str(count2) + '[' for j in range(len(answer)): #words_synsets = words_probs.get(int(m[i][k].split(':')[0])) #s.write(answer[j][1].offset+"\n") if answer[j][0] == 0: continue aux = 0 a = (answer[j][1].offset()) #print 'Coco '+ 
str(answer[j][1].offset()) if synsets.has_key(a): aux = synsets.get(a) else: synsets[a] = count aux = count count += 1 prob_s = 0.0 prob_w = 0.0 prob_s_w = float(answer[j][0])/total #if synset_count.has_key(str(answer[j][1].offset)): # prob_s = synset_count.get(str(answer[j][1].offset))/float(sum_synset) #else: # prob_s = 0.1 prob_s_s = 1.0/count2 #if word_count.has_key(dictio.get(int(m[i][k].split(":")[0]))): # prob_w = word_count.get(dictio.get(int(m[i][k].split(":")[0])))/float(sum_word) #else: # prob_w = 0.1 if word_count_corpus.has_key(dictio.get(int(m[i][k].split(":")[0])).split("_")[0]): prob_w = word_count_corpus.get(dictio.get(int(m[i][k].split(":")[0])).split("_")[0])/float(sum_word_corpus) else: prob_w = 0.1 prob_w_s = (prob_w * prob_s_w) / prob_s_s if j == len(answer) - 1 or count2 == c: if prob_w_s > 1.0: #print 'Word: 'dictio.get(int(m[i][k].split(":")[0])) + ' Synset: ' + str(answer[j][1]) p.write('Word: '+ dictio.get(int(m[i][k].split(":")[0])) + ' Synset: ' + str(answer[j][1])) #print 'Synsets disambiguated: ' + str(answer) p.write('---- Synsets disambiguated: ' + str(answer)) #print synset_count.get(str(answer[j][1].offset)), word_count.get(dictio.get(int(m[i][k].split(":")[0]))), sum_synset, sum_word #print 'P(s)=' +prob_s +', P(w)='+prob_w +', P(s|w)='+ prob_s_w +', P(w|s)='+ prob_w_s p.write('---- P(s)=' +str(prob_s) +', P(w)='+ str(prob_w) +', P(s|w)='+ str(prob_s_w) +', P(w|s)='+ str(prob_w_s)) p.write("\n") nana += str(aux) + ':' + str(1) + '] ' #nana += str(aux) + ':' + str(words_synsets.get(answer[j][1].offset)) + '] ' else: nana += str(aux) + ':' + str(prob_w_s) + '] ' #g.write(str(aux) + ':' + str(float(answer[j][0]/total)) + '] ') else: c += 1 if prob_w_s > 1.0: #print 'Word: 'dictio.get(int(m[i][k].split(":")[0])) + ' Synset: ' + str(answer[j][1]) p.write('Word: '+ dictio.get(int(m[i][k].split(":")[0])) + ' Synset: ' + str(answer[j][1])) #print 'Synsets disambiguated: ' + str(answer) p.write('---- Synsets disambiguated: ' + 
str(answer)) #print synset_count.get(str(answer[j][1].offset)), word_count.get(dictio.get(int(m[i][k].split(":")[0]))), sum_synset, sum_word #print 'P(s)=' +prob_s +', P(w)='+prob_w +', P(s|w)='+ prob_s_w +', P(w|s)='+ prob_w_s p.write('---- P(s)=' +str(prob_s) +', P(w)='+ str(prob_w) +', P(s|w)='+ str(prob_s_w) +', P(w|s)='+ str(prob_w_s)) p.write("\n") nana += str(aux) + ':' + str(1) + '] ' #nana += str(aux) + ':' + str(words_synsets.get(answer[j][1].offset)) +' ' else: nana += str(aux) + ':' + str(prob_w_s) +' ' #g.write(str(aux) + ':' + str(float(answer[j][0]/total)) +' ') nana += '\n' #print nana #return to_write.append(nana) #g.write("\n") ne = revert_dicio(synsets) for i in range(len(ne)): #print ne.get(i), type(ne.get(i)) nn.write(str(ne.get(i))+'\n') g.write(str(len(ne))+"\n") for i in range(len(to_write)): g.write(to_write[i]) nn.close() p.close() g.close() end = time.time() pickle.dump(extra_synsets, open(string + file_name +"_imag.p","w")) print end - start def calculate_word_frequency(corpus): word_count_dict = {} for i in range(len(corpus)): for j in range(len(corpus[i])): if word_count_dict.has_key(corpus[i][j]): aux = word_count_dict.get(corpus[i][j]) word_count_dict[corpus[i][j]] = aux + 1 else: word_count_dict[corpus[i][j]] = 1 return word_count_dict #bayes_theorem(sentences[i], dictio, synset_count, word_count, sum_synset, sum_word, synset_median, word_median) def bayes_theorem(context, vocab, word_count, sum_word, word_median): words_probs = {} print len(vocab) count = 0 for word in vocab: if count%1000 == 0: print 'word ' + str(count) count += 1 sent = context ambiguous = vocab.get(word).split("_")[0] post = vocab.get(word).split("_")[1] #print ambiguous, post try: answer = adapted_lesk(sent, ambiguous, pos= penn_to_wn(post), nbest=True) except Exception, e: continue total = 0 for j in range(len(answer)): total += answer[j][0] if total == 0: continue for j in range(len(answer)): if answer[j][0] == 0: continue prob_w = 0.0 prob_s_w = 
float(answer[j][0])/total if word_count.has_key(vocab.get(word)): prob_w = word_count.get(vocab.get(word))/float(sum_word) else: prob_w = word_median prob_w_s = prob_s_w * prob_w if words_probs.has_key(word): aux = words_probs.get(word) aux[int(answer[j][1].offset)] = prob_w_s words_probs[word] = aux else: aux = {} aux[int(answer[j][1].offset)] = prob_w_s words_probs[word] = aux #print words_probs synsets_probs = {} for word in words_probs: for synset in words_probs.get(word): if synsets_probs.has_key(synset): aux = synsets_probs.get(synset) aux[word] = words_probs.get(word).get(synset) synsets_probs[synset] = aux else: aux = {} aux[word] = words_probs.get(word).get(synset) synsets_probs[synset] = aux for synset in synsets_probs: sum_words = 0.0 for word in synsets_probs.get(synset): sum_words += synsets_probs.get(synset).get(word) for word in synsets_probs.get(synset): aux = synsets_probs.get(synset).get(word) synsets_probs.get(synset)[word] = float(aux)/sum_words new_word_probs = {} for word in synsets_probs: for synset in synsets_probs.get(word): if new_word_probs.has_key(synset): aux = new_word_probs.get(synset) aux[word] = synsets_probs.get(word).get(synset) new_word_probs[synset] = aux else: aux = {} aux[word] = synsets_probs.get(word).get(synset) new_word_probs[synset] = aux return new_word_probs def create_dicio(eta): dictio = {} for i in range(len(eta)-1): for j in range(2, len(eta[i])): if dictio.has_key(eta[i][0]): aux = dictio.get(eta[i][0]) aux.append(eta[i][j]) dictio[eta[i][0]] = aux else: aux = [] aux.append(eta[i][j]) dictio[eta[i][0]] = aux return dictio def revert_dicio(words_ids): new_dictio = {} for key in words_ids: new_dictio[words_ids[key]] = key return new_dictio def is_noun(tag): return tag in ['NN', 'NNS', 'NNP', 'NNPS'] def is_verb(tag): return tag in ['VB', 'VBD', 'VBG', 'VBN', 'VBP', 'VBZ'] def is_adverb(tag): return tag in ['RB', 'RBR', 'RBS'] def is_adjective(tag): return tag in ['JJ', 'JJR', 'JJS'] def penn_to_wn(tag): if 
is_adjective(tag): return wn.ADJ elif is_noun(tag): return wn.NOUN elif is_adverb(tag): return wn.ADV elif is_verb(tag): return wn.VERB return None if __name__ == "__main__": main(sys.argv[1])
lgpl-2.1
8,973,805,166,957,558,000
30.100823
147
0.557327
false
2.40723
false
false
false
akirk/youtube-dl
youtube_dl/extractor/dreisat.py
1
3607
from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( ExtractorError, unified_strdate, determine_ext, ) class DreiSatIE(InfoExtractor): IE_NAME = '3sat' _VALID_URL = r'(?:http://)?(?:www\.)?3sat\.de/mediathek/(?:index\.php|mediathek\.php)?\?(?:(?:mode|display)=[^&]+&)*obj=(?P<id>[0-9]+)$' _TESTS = [ { 'url': 'http://www.3sat.de/mediathek/index.php?mode=play&obj=45918', 'md5': 'be37228896d30a88f315b638900a026e', 'info_dict': { 'id': '45918', 'ext': 'mp4', 'title': 'Waidmannsheil', 'description': 'md5:cce00ca1d70e21425e72c86a98a56817', 'uploader': '3sat', 'upload_date': '20140913' } }, { 'url': 'http://www.3sat.de/mediathek/mediathek.php?mode=play&obj=51066', 'only_matching': True, }, ] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') details_url = 'http://www.3sat.de/mediathek/xmlservice/web/beitragsDetails?ak=web&id=%s' % video_id details_doc = self._download_xml(details_url, video_id, 'Downloading video details') status_code = details_doc.find('./status/statuscode') if status_code is not None and status_code.text != 'ok': code = status_code.text if code == 'notVisibleAnymore': message = 'Video %s is not available' % video_id else: message = '%s returned error: %s' % (self.IE_NAME, code) raise ExtractorError(message, expected=True) thumbnail_els = details_doc.findall('.//teaserimage') thumbnails = [{ 'width': int(te.attrib['key'].partition('x')[0]), 'height': int(te.attrib['key'].partition('x')[2]), 'url': te.text, } for te in thumbnail_els] information_el = details_doc.find('.//information') video_title = information_el.find('./title').text video_description = information_el.find('./detail').text details_el = details_doc.find('.//details') video_uploader = details_el.find('./channel').text upload_date = unified_strdate(details_el.find('./airtime').text) format_els = details_doc.findall('.//formitaet') formats = [] for fe in format_els: if 
fe.find('./url').text.startswith('http://www.metafilegenerator.de/'): continue url = fe.find('./url').text # ext = determine_ext(url, None) # if ext == 'meta': # doc = self._download_xml(url, video_id, 'Getting rtmp URL') # url = doc.find('./default-stream-url').text formats.append({ 'format_id': fe.attrib['basetype'], 'width': int(fe.find('./width').text), 'height': int(fe.find('./height').text), 'url': url, 'filesize': int(fe.find('./filesize').text), 'video_bitrate': int(fe.find('./videoBitrate').text), }) self._sort_formats(formats) return { '_type': 'video', 'id': video_id, 'title': video_title, 'formats': formats, 'description': video_description, 'thumbnails': thumbnails, 'thumbnail': thumbnails[-1]['url'], 'uploader': video_uploader, 'upload_date': upload_date, }
unlicense
-2,878,699,225,443,779,000
35.806122
140
0.524258
false
3.695697
false
false
false
svr93/node-gyp
gyp/pylib/gyp/easy_xml.py
1
4891
# Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import re import os def XmlToString(content, encoding='utf-8', pretty=False): """ Writes the XML content to disk, touching the file only if it has changed. Visual Studio files have a lot of pre-defined structures. This function makes it easy to represent these structures as Python data structures, instead of having to create a lot of function calls. Each XML element of the content is represented as a list composed of: 1. The name of the element, a string, 2. The attributes of the element, a dictionary (optional), and 3+. The content of the element, if any. Strings are simple text nodes and lists are child elements. Example 1: <test/> becomes ['test'] Example 2: <myelement a='value1' b='value2'> <childtype>This is</childtype> <childtype>it!</childtype> </myelement> becomes ['myelement', {'a':'value1', 'b':'value2'}, ['childtype', 'This is'], ['childtype', 'it!'], ] Args: content: The structured content to be converted. encoding: The encoding to report on the first XML line. pretty: True if we want pretty printing with indents and new lines. Returns: The XML content as a string. """ # We create a huge list of all the elements of the file. xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding] if pretty: xml_parts.append('\n') _ConstructContentList(xml_parts, content, pretty) # Convert it to a string return ''.join(xml_parts) def _ConstructContentList(xml_parts, specification, pretty, level=0): """ Appends the XML parts corresponding to the specification. Args: xml_parts: A list of XML parts to be appended to. specification: The specification of the element. See EasyXml docs. pretty: True if we want pretty printing with indents and new lines. level: Indentation level. """ # The first item in a specification is the name of the element. 
if pretty: indentation = ' ' * level new_line = '\n' else: indentation = '' new_line = '' name = specification[0] if not isinstance(name, str): raise Exception('The first item of an EasyXml specification should be ' 'a string. Specification was ' + str(specification)) xml_parts.append(indentation + '<' + name) # Optionally in second position is a dictionary of the attributes. rest = specification[1:] if rest and isinstance(rest[0], dict): for at, val in sorted(rest[0].iteritems()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: xml_parts.append('>') all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) multi_line = not all_strings if multi_line and new_line: xml_parts.append(new_line) for child_spec in rest: # If it's a string, append a text node. # Otherwise recurse over that child definition if isinstance(child_spec, str): xml_parts.append(_XmlEscape(child_spec)) else: _ConstructContentList(xml_parts, child_spec, pretty, level + 1) if multi_line and indentation: xml_parts.append(indentation) xml_parts.append('</%s>%s' % (name, new_line)) else: xml_parts.append('/>%s' % new_line) def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, win32=False): """ Writes the XML content to disk, touching the file only if it has changed. Args: content: The structured content to be written. path: Location of the file. encoding: The encoding to report on the first line of the XML file. pretty: True if we want pretty printing with indents and new lines. 
""" xml_string = XmlToString(content, encoding, pretty) if win32 and os.linesep != '\r\n': xml_string = xml_string.replace('\n', '\r\n') # Fix encoding xml_string = unicode(xml_string, 'Windows-1251').encode(encoding) # Get the old content try: f = open(path, 'r') existing = f.read() f.close() except: existing = None # It has changed, write it if existing != xml_string: f = open(path, 'w') f.write(xml_string) f.close() _xml_escape_map = { '"': '&quot;', "'": '&apos;', '<': '&lt;', '>': '&gt;', '&': '&amp;', '\n': '&#xA;', '\r': '&#xD;', } _xml_escape_re = re.compile( "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) def _XmlEscape(value, attr=False): """ Escape a string for inclusion in XML.""" def replace(match): m = match.string[match.start() : match.end()] # don't replace single quotes in attrs if attr and m == "'": return m return _xml_escape_map[m] return _xml_escape_re.sub(replace, value)
mit
-6,326,042,995,111,190,000
29.56875
80
0.639951
false
3.671922
false
false
false
simphony/simphony-remote
jupyterhub/remoteappmanager_config.py
1
2462
# # -------------------- # # Docker configuration # # -------------------- # # # # Configuration options for connecting to the docker machine. # # These options override the default provided by the local environment # # variables. # # # # The endpoint of the docker machine, specified as a URL. # # By default, it is obtained by DOCKER_HOST envvar. On Linux in a vanilla # # install, the connection uses a unix socket by default. # # docker_host = "tcp://192.168.99.100:2376" # # Docker realm is used to identify the containers that are managed by this # # particular instance of simphony-remote. It will be the first entry in # # the container name, and will also be added as part of a run-time container # # label. You generally should not change this unless you have multiple # # installations of simphony-remote all using the same docker host. # # docker_realm = "whatever" # # # TLS configuration # # ----------------- # # # # Set this to True to enable TLS connection with the docker client # # tls = True # # # Enables verification of the certificates. By default, this is the # # result of the DOCKER_TLS_VERIFY envvar. Set to False to skip verification/ # # tls_verify = True # # # Full paths of the CA certificate, certificate and key of the docker # # machine. Normally these are computed from the DOCKER_CERT_PATH. # # If you want to use a recognised CA for verification, set the tls_ca to # # an empty string # # tls_ca = "/path/to/ca.pem" # tls_cert = "/path/to/cert.pem" # tls_key = "/path/to/key.pem" # # # ---------- # # Accounting # # ---------- # # Notes on os.path: # # 1. When running with system-user mode, both the current directory and '~' # # are the system user's home directory. # # 2. 
When running in virtual-user mode, the current directory is the # # directory where jupyterhub is started, '~' would be evaluated according to # # the spawned process's owner's home directory (not the virtual user's # # home directory) # # # CSV database support # # database_class = "remoteappmanager.db.csv_db.CSVDatabase" # database_kwargs = { # "csv_file_path": os.path.abspath("./remoteappmanager.csv")} # # # Sqlite database support # # database_class = "remoteappmanager.db.orm.ORMDatabase" # database_kwargs = { # "url": "sqlite:///"+os.path.abspath('./remoteappmanager.db')} # # ---------------- # # Google Analytics # # ---------------- # # Put your tracking id from Google Analytics here. # ga_tracking_id = "UA-XXXXXX-X"
bsd-3-clause
-61,252,048,939,831,944
33.676056
79
0.672624
false
3.467606
false
false
false
jolid/script.module.donnie
lib/donnie/tvrelease.py
1
4966
import urllib2, urllib, sys, os, re, random, copy from BeautifulSoup import BeautifulSoup, Tag, NavigableString import xbmc,xbmcplugin,xbmcgui,xbmcaddon from t0mm0.common.net import Net from t0mm0.common.addon import Addon from scrapers import CommonScraper net = Net() try: import json except: # pre-frodo and python 2.4 import simplejson as json ''' ########################################################### Usage and helper functions ############################################################''' class TVReleaseServiceSracper(CommonScraper): def __init__(self, settingsid, DB=None, REG=None): if DB: self.DB=DB if REG: self.REG=REG self.addon_id = 'script.module.donnie' self.service='tvrelease' self.name = 'tv-release.net' self.raiseError = False self.referrer = 'http://tv-release.net/' self.base_url = 'http://tv-release.net/' self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3' self.provides = [] self.settingsid = settingsid self._loadsettings() self.settings_addon = self.addon def _getShows(self, silent=False): self.log('Do Nothing here') def _getRecentShows(self, silent=False): self.log('Do Nothing here') def _getEpisodes(self, showid, show, url, pDialog, percent, silent): self.log('Do Nothing here') def _getMovies(self, silent=False): self.log('Do Nothing here') def _getRecentMovies(self, silent): self.log('Do Nothing here') def _getStreams(self, episodeid=None, movieid=None): query = "" if episodeid: row = self.DB.query("SELECT rw_shows.showname, season, episode FROM rw_episodes JOIN rw_shows ON rw_shows.showid=rw_episodes.showid WHERE episodeid=?", [episodeid]) name = row[0].replace("'", "") if re.search('\\(\\d\\d\\d\\d\\)$', row[0]): name = name[0:len(name)-7] season = row[1].zfill(2) episode = row[2] #query = str("%s S%sE%s" % (name, season, episode)) uri = "" elif movieid: row = self.DB.query("SELECT movie, year FROM rw_movies WHERE imdb=? 
LIMIT 1", [movieid]) movie = self.cleanQuery(row[0]) query = "%s %s" %(movie, row[1]) '''streams = [] url = "%splugins/metasearch" % self.base_url params = {"type": "video", "filter": "cached", "api_key": api_key, "q": query} pagedata = net.http_POST(url, params).content if pagedata=='': return False data = json.loads(pagedata) try: files = data['files'] for f in files: if f['type'] == 'video': raw_url = f['id'] name = f['name'] size = int(f['size']) / (1024 * 1024) if size > 2000: size = size / 1024 unit = 'GB' else : unit = 'MB' self.getStreamByPriority('Furk - %s ([COLOR blue]%s %s[/COLOR])' %(name, size, unit), self.service + '://' + raw_url) except: pass self.DB.commit()''' def getStreamByPriority(self, link, stream): self.log(link) host = 'tv-release.net' SQL = "INSERT INTO rw_stream_list(stream, url, priority, machineid) " \ "SELECT ?, ?, priority, ? " \ "FROM rw_providers " \ "WHERE mirror=? and provider=?" self.DB.execute(SQL, [link, stream, self.REG.getSetting('machine-id'), host, self.service]) def _getServicePriority(self, link): self.log(link) host = 'tv-release.net' row = self.DB.query("SELECT priority FROM rw_providers WHERE mirror=? 
and provider=?", [host, self.service]) return row[0] def _resolveStream(self, stream): raw_url = stream.replace(self.service + '://', '') resolved_url = '' t_files = [] t_options = [] sdialog = xbmcgui.Dialog() api_key = self._getKey() params = {"type": "video", "id": raw_url, "api_key": api_key, 't_files': 1} url = "%sfile/get" % self.base_url pagedata = net.http_POST(url, params).content if pagedata=='': return False #print pagedata data = json.loads(str(pagedata)) try: files = data['files'][0]['t_files'] for f in files: if re.search('^video/', f['ct']): size = int(f['size']) / (1024 * 1024) if size > 2000: size = size / 1024 unit = 'GB' else : unit = 'MB' t_files.append("%s ([COLOR blue]%s %s[/COLOR])" %(f['name'], size, unit)) t_options.append(f['url_dl']) file_select = sdialog.select('Select Furk Stream', t_files) if file_select < 0: return resolved_url resolved_url = str(t_options[file_select]) except: pass self.log("Furk retruned: %s", resolved_url, level=0) return resolved_url def _resolveIMDB(self, uri): #Often needed if a sites movie index does not include imdb links but the movie page does imdb = '' print uri pagedata = self.getURL(uri, append_base_url=True) if pagedata=='': return imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1) return imdb def whichHost(self, host): #Sometimes needed table = { 'Watch Blah' : 'blah.com', 'Watch Blah2' : 'blah2.com', } try: host_url = table[host] return host_url except: return 'Unknown'
gpl-2.0
6,514,483,047,913,878,000
27.872093
167
0.620217
false
2.882182
false
false
false
kpiorno/kivy3dgui
kivy3dgui/objloader.py
1
5490
from kivy.logger import Logger import os class MeshData(object): def __init__(self, **kwargs): self.name = kwargs.get("name") self.vertex_format = [ ('v_pos', 3, 'float'), ('v_normal', 3, 'float'), ('v_tc0', 2, 'float')] self.vertices = [] self.indices = [] def calculate_normals(self): for i in range(len(self.indices) / (3)): fi = i * 3 v1i = self.indices[fi] v2i = self.indices[fi + 1] v3i = self.indices[fi + 2] vs = self.vertices p1 = [vs[v1i + c] for c in range(3)] p2 = [vs[v2i + c] for c in range(3)] p3 = [vs[v3i + c] for c in range(3)] u,v = [0,0,0], [0,0,0] for j in range(3): v[j] = p2[j] - p1[j] u[j] = p3[j] - p1[j] n = [0,0,0] n[0] = u[1] * v[2] - u[2] * v[1] n[1] = u[2] * v[0] - u[0] * v[2] n[2] = u[0] * v[1] - u[1] * v[0] for k in range(3): self.vertices[v1i + 3 + k] = n[k] self.vertices[v2i + 3 + k] = n[k] self.vertices[v3i + 3 + k] = n[k] class ObjFile: def finish_object(self): if self._current_object == None: return mesh = [MeshData()] cont_mesh=0 idx = 0 for f in self.faces: verts = f[0] norms = f[1] tcs = f[2] material_ = list(map(float, f[3])) if len(mesh[cont_mesh].indices) == 65535: mesh.append(MeshData()) cont_mesh+=1 idx=0 for i in range(3): #get normal components n = (0.0, 0.0, 0.0) if norms[i] != -1: n = self.normals[norms[i]-1] #get texture coordinate components t = (0.4, 0.4) if tcs[i] != -1: t = self.texcoords[tcs[i]-1] #get vertex components v = self.vertices[verts[i]-1] data = [v[0], v[1], v[2], n[0], n[1], n[2], t[0], t[1], material_[0], material_[1], material_[2]] mesh[cont_mesh].vertices.extend(data) tri = [idx, idx+1, idx+2] mesh[cont_mesh].indices.extend(tri) idx += 3 self.objects[self._current_object] = mesh #mesh.calculate_normals() self.faces = [] def __init__(self, filename, swapyz=False): """Loads a Wavefront OBJ file. 
""" self.objects = {} self.vertices = [] self.normals = [] self.texcoords = [] self.faces = [] self.mtl = None self._current_object = None material = None for line in open(filename, "r"): if line.startswith('#'): continue if line.startswith('s'): continue values = line.split() if not values: continue if values[0] == 'o': self.finish_object() self._current_object = values[1] elif values[0] == 'mtllib': mtl_path = mtl_filename = values[1] if (os.path.isabs(filename) and not os.path.isabs(mtl_filename)) or \ (os.path.dirname(filename) and not os.path.dirname(mtl_filename)): # if needed, correct the mtl path to be relative or same-dir to/as the object path mtl_path = os.path.join(os.path.dirname(filename), mtl_filename) self.mtl = MTL(mtl_path) elif values[0] in ('usemtl', 'usemat'): material = values[1] if values[0] == 'v': v = list(map(float, values[1:4])) if swapyz: v = v[0], v[2], v[1] self.vertices.append(v) elif values[0] == 'vn': v = list(map(float, values[1:4])) if swapyz: v = v[0], v[2], v[1] self.normals.append(v) elif values[0] == 'vt': self.texcoords.append(list(map(float, values[1:3]))) elif values[0] == 'f': face = [] texcoords = [] norms = [] for v in values[1:]: w = v.split('/') face.append(int(w[0])) if len(w) >= 2 and len(w[1]) > 0: texcoords.append(int(w[1])) else: texcoords.append(-1) if len(w) >= 3 and len(w[2]) > 0: norms.append(int(w[2])) else: norms.append(-1) self.faces.append((face, norms, texcoords, self.mtl[material]["Kd"] if self.mtl!=None else [1., 1., 1.])) self.finish_object() def MTL(filename): contents = {} mtl = None if not os.path.exists(filename): return for line in open(filename, "r"): if line.startswith('#'): continue values = line.split() if not values: continue if values[0] == 'newmtl': mtl = contents[values[1]] = {} elif mtl is None: raise ValueError("mtl file doesn't start with newmtl stmt") mtl[values[0]] = values[1:] return contents
mit
-678,245,770,594,618,000
32.47561
121
0.428597
false
3.548804
false
false
false
bmentges/django-cart
cart/cart.py
1
2554
import datetime from django.db.models import Sum from django.db.models import F from . import models CART_ID = 'CART-ID' class ItemAlreadyExists(Exception): pass class ItemDoesNotExist(Exception): pass class Cart: def __init__(self, request): cart_id = request.session.get(CART_ID) if cart_id: cart = models.Cart.objects.filter(id=cart_id, checked_out=False).first() if cart is None: cart = self.new(request) else: cart = self.new(request) self.cart = cart def __iter__(self): for item in self.cart.item_set.all(): yield item def new(self, request): cart = models.Cart.objects.create(creation_date=datetime.datetime.now()) request.session[CART_ID] = cart.id return cart def add(self, product, unit_price, quantity=1): item = models.Item.objects.filter(cart=self.cart, product=product).first() if item: item.unit_price = unit_price item.quantity += int(quantity) item.save() else: models.Item.objects.create(cart=self.cart, product=product, unit_price=unit_price, quantity=quantity) def remove(self, product): item = models.Item.objects.filter(cart=self.cart, product=product).first() if item: item.delete() else: raise ItemDoesNotExist def update(self, product, quantity, unit_price=None): item = models.Item.objects.filter(cart=self.cart, product=product).first() if item: if quantity == 0: item.delete() else: item.unit_price = unit_price item.quantity = int(quantity) item.save() else: raise ItemDoesNotExist def count(self): return self.cart.item_set.all().aggregate(Sum('quantity')).get('quantity__sum', 0) def summary(self): return self.cart.item_set.all().aggregate(total=Sum(F('quantity')*F('unit_price'))).get('total', 0) def clear(self): self.cart.item_set.all().delete() def is_empty(self): return self.count() == 0 def cart_serializable(self): representation = {} for item in self.cart.item_set.all(): item_id = str(item.object_id) item_dict = { 'total_price': item.total_price, 'quantity': item.quantity } representation[item_id] = item_dict return representation
lgpl-3.0
-4,190,622,972,837,579,300
28.697674
113
0.577525
false
3.923195
false
false
false
librallu/cohorte-herald
python/herald/remote/herald_xmlrpc.py
1
10255
#!/usr/bin/env python # -- Content-Encoding: UTF-8 -- """ Pelix remote services implementation based on Herald messaging and xmlrpclib :author: Thomas Calmant :copyright: Copyright 2014, isandlaTech :license: Apache License 2.0 :version: 0.0.3 :status: Alpha .. Copyright 2014 isandlaTech Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Module version __version_info__ = (0, 0, 3) __version__ = ".".join(str(x) for x in __version_info__) # Documentation strings format __docformat__ = "restructuredtext en" # ------------------------------------------------------------------------------ # Herald import herald.beans as beans import herald.remote # iPOPO decorators from pelix.ipopo.decorators import ComponentFactory, Requires, Validate, \ Invalidate, Property, Provides, Instantiate # Pelix constants import pelix.remote import pelix.remote.transport.commons as commons # Standard library import logging # XML RPC modules try: # Python 3 # pylint: disable=F0401 from xmlrpc.server import SimpleXMLRPCDispatcher import xmlrpc.client as xmlrpclib except ImportError: # Python 2 # pylint: disable=F0401 from SimpleXMLRPCServer import SimpleXMLRPCDispatcher import xmlrpclib # ------------------------------------------------------------------------------ HERALDRPC_CONFIGURATION = 'herald-xmlrpc' """ Remote Service configuration constant """ PROP_HERALDRPC_PEER = "herald.rpc.peer" """ UID of the peer exporting a service """ PROP_HERALDRPC_SUBJECT = 'herald.rpc.subject' """ Subject to contact the exporter """ 
SUBJECT_REQUEST = 'herald/rpc/xmlrpc' """ Subject to use for requests """ SUBJECT_REPLY = 'herald/rpc/xmlrpc/reply' """ Subject to use for replies """ _logger = logging.getLogger(__name__) # ------------------------------------------------------------------------------ class _XmlRpcDispatcher(SimpleXMLRPCDispatcher): """ A XML-RPC dispatcher with a custom dispatch method Calls the dispatch method given in the constructor """ def __init__(self, dispatch_method, encoding=None): """ Sets up the servlet """ SimpleXMLRPCDispatcher.__init__(self, allow_none=True, encoding=encoding) # Register the system.* functions self.register_introspection_functions() # Make a link to the dispatch method self._dispatch_method = dispatch_method def _simple_dispatch(self, name, params): """ Dispatch method """ try: # Internal method return self.funcs[name](*params) except KeyError: # Other method pass # Call the other method outside the except block, to avoid messy logs # in case of error return self._dispatch_method(name, params) def dispatch(self, data): """ Handles a HTTP POST request :param data: The string content of the request :return: The XML-RPC response as a string """ # Dispatch return self._marshaled_dispatch(data, self._simple_dispatch) @ComponentFactory(herald.remote.FACTORY_HERALD_XMLRPC_EXPORTER) @Requires('_directory', herald.SERVICE_DIRECTORY) # SERVICE_EXPORT_PROVIDER is provided by the parent class @Provides(herald.SERVICE_LISTENER) @Property('_filters', herald.PROP_FILTERS, [SUBJECT_REQUEST]) @Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED, (HERALDRPC_CONFIGURATION,)) @Instantiate('herald-rpc-exporter-xmlrpc') class HeraldRpcServiceExporter(commons.AbstractRpcServiceExporter): """ Herald Remote Services exporter """ def __init__(self): """ Sets up the exporter """ # Call parent super(HeraldRpcServiceExporter, self).__init__() # Herald directory self._directory = None # Herald filters self._filters = None # Handled configurations self._kinds = None # 
Dispatcher self._dispatcher = None def make_endpoint_properties(self, svc_ref, name, fw_uid): """ Prepare properties for the ExportEndpoint to be created :param svc_ref: Service reference :param name: Endpoint name :param fw_uid: Framework UID :return: A dictionary of extra endpoint properties """ return {PROP_HERALDRPC_PEER: self._directory.local_uid, PROP_HERALDRPC_SUBJECT: SUBJECT_REQUEST} @Validate def validate(self, context): """ Component validated """ # Call parent super(HeraldRpcServiceExporter, self).validate(context) # Setup the dispatcher self._dispatcher = _XmlRpcDispatcher(self.dispatch) @Invalidate def invalidate(self, context): """ Component invalidated """ # Call parent super(HeraldRpcServiceExporter, self).invalidate(context) # Clean up self._dispatcher = None def herald_message(self, herald_svc, message): """ Received a message from Herald :param herald_svc: The Herald service :param message: A message bean """ result = self._dispatcher.dispatch(message.content) # answer to the message reply_msg = beans.Message(SUBJECT_REPLY, result) reply_msg.add_header('replies-to', message.uid) origin = message.get_header('original_sender') if origin is None: # in the case it was not routed origin = message.sender herald_svc.fire(origin, reply_msg) # ------------------------------------------------------------------------------ class _XmlRpcEndpointProxy(object): """ Proxy to use XML-RPC over Herald """ def __init__(self, name, peer, subject, send_method): """ Sets up the endpoint proxy :param name: End point name :param peer: UID of the peer to contact :param subject: Subject to use for RPC :param send_method: Method to use to send a request """ self.__name = name self.__peer = peer self.__subject = subject self.__send = send_method self.__cache = {} def __getattr__(self, name): """ Prefixes the requested attribute name by the endpoint name """ return self.__cache.setdefault( name, _XmlRpcMethod("{0}.{1}".format(self.__name, name), self.__peer, self.__subject, 
self.__send)) class _XmlRpcMethod(object): """ Represents a method in a call proxy """ def __init__(self, method_name, peer, subject, send_method): """ Sets up the method :param method_name: Full method name :param peer: UID of the peer to contact :param subject: Subject to use for RPC :param send_method: Method to use to send a request """ self.__name = method_name self.__peer = peer self.__subject = subject self.__send = send_method def __call__(self, *args): """ Method is being called """ # Forge the request request = xmlrpclib.dumps(args, self.__name, encoding='utf-8', allow_none=True) # Send it reply_message = self.__send(self.__peer, self.__subject, request) # Parse the reply parser, unmarshaller = xmlrpclib.getparser() parser.feed(reply_message.content) parser.close() return unmarshaller.close() @ComponentFactory(herald.remote.FACTORY_HERALD_XMLRPC_IMPORTER) @Requires('_herald', herald.SERVICE_HERALD) @Requires('_directory', herald.SERVICE_DIRECTORY) @Provides(pelix.remote.SERVICE_IMPORT_ENDPOINT_LISTENER) @Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED, (HERALDRPC_CONFIGURATION,)) @Instantiate('herald-rpc-importer-xmlrpc') class HeraldRpcServiceImporter(commons.AbstractRpcServiceImporter): """ XML-RPC Remote Services importer """ def __init__(self): """ Sets up the exporter """ # Call parent super(HeraldRpcServiceImporter, self).__init__() # Herald service self._herald = None # Component properties self._kinds = None def __call(self, peer, subject, content): """ Method called by the proxy to send a message over Herald """ msg = beans.Message(subject, content) msg.add_header('original_sender', self._directory.local_uid) return self._herald.send(peer, msg) def make_service_proxy(self, endpoint): """ Creates the proxy for the given ImportEndpoint :param endpoint: An ImportEndpoint bean :return: A service proxy """ # Get Peer UID information peer_uid = endpoint.properties.get(PROP_HERALDRPC_PEER) if not peer_uid: _logger.warning("Herald-RPC 
endpoint without peer UID: %s", endpoint) return # Get request subject information subject = endpoint.properties.get(PROP_HERALDRPC_SUBJECT) if not subject: _logger.warning("Herald-RPC endpoint without subject: %s", endpoint) return # Return the proxy return _XmlRpcEndpointProxy(endpoint.name, peer_uid, subject, self.__call) def clear_service_proxy(self, endpoint): """ Destroys the proxy made for the given ImportEndpoint :param endpoint: An ImportEndpoint bean """ # Nothing to do return
apache-2.0
7,400,921,342,612,229,000
28.724638
80
0.603608
false
4.336152
true
false
false
nemesiscodex/JukyOS-sugar
extensions/deviceicon/touchpad.py
1
4769
# Copyright (C) 2010, Walter Bender, Sugar Labs # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from gettext import gettext as _ import os import gtk import gconf import glib import logging from sugar.graphics.tray import TrayIcon from sugar.graphics.xocolor import XoColor from sugar.graphics.palette import Palette from sugar.graphics import style from jarabe.frame.frameinvoker import FrameWidgetInvoker TOUCHPAD_MODE_MOUSE = 'mouse' TOUCHPAD_MODE_PENTABLET = 'pentablet' TOUCHPAD_MODES = (TOUCHPAD_MODE_MOUSE, TOUCHPAD_MODE_PENTABLET) STATUS_TEXT = (_('finger'), _('stylus')) STATUS_ICON = ('touchpad-capacitive', 'touchpad-resistive') # NODE_PATH is used to communicate with the touchpad device. NODE_PATH = '/sys/devices/platform/i8042/serio1/hgpk_mode' class DeviceView(TrayIcon): """ Manage the touchpad mode from the device palette on the Frame. """ FRAME_POSITION_RELATIVE = 500 def __init__(self): """ Create the icon that represents the touchpad. 
""" icon_name = STATUS_ICON[_read_touchpad_mode()] client = gconf.client_get_default() color = XoColor(client.get_string('/desktop/sugar/user/color')) TrayIcon.__init__(self, icon_name=icon_name, xo_color=color) self.set_palette_invoker(FrameWidgetInvoker(self)) self.connect('button-release-event', self.__button_release_event_cb) def create_palette(self): """ Create a palette for this icon; called by the Sugar framework when a palette needs to be displayed. """ label = glib.markup_escape_text(_('My touchpad')) self.palette = ResourcePalette(label, self.icon) self.palette.set_group_id('frame') return self.palette def __button_release_event_cb(self, widget, event): """ Callback for button release event; used to invoke touchpad-mode change. """ self.palette.toggle_mode() return True class ResourcePalette(Palette): """ Palette attached to the decive icon that represents the touchpas. """ def __init__(self, primary_text, icon): """ Create the palette and initilize with current touchpad status. """ Palette.__init__(self, label=primary_text) self._icon = icon vbox = gtk.VBox() self.set_content(vbox) self._status_text = gtk.Label() vbox.pack_start(self._status_text, padding=style.DEFAULT_PADDING) self._status_text.show() vbox.show() self._mode = _read_touchpad_mode() self._update() def _update(self): """ Update the label and icon based on the current mode. """ self._status_text.set_label(STATUS_TEXT[self._mode]) self._icon.props.icon_name = STATUS_ICON[self._mode] def toggle_mode(self): """ Toggle the touchpad mode. """ self._mode = 1 - self._mode _write_touchpad_mode(self._mode) self._update() def setup(tray): """ Initialize the devic icon; called by the shell when initializing the Frame. """ if os.path.exists(NODE_PATH): tray.add_device(DeviceView()) _write_touchpad_mode_str(TOUCHPAD_MODE_MOUSE) def _read_touchpad_mode_str(): """ Read the touchpad mode string from the node path. 
""" node_file_handle = open(NODE_PATH, 'r') text = node_file_handle.read().strip().lower() node_file_handle.close() return text def _read_touchpad_mode(): """ Read the touchpad mode and return the mode index. """ mode_str = _read_touchpad_mode_str() if mode_str not in TOUCHPAD_MODES: return None return TOUCHPAD_MODES.index(mode_str) def _write_touchpad_mode_str(mode_str): """ Write the touchpad mode to the node path. """ try: node_file_handle = open(NODE_PATH, 'w') except IOError, e: logging.error('Error opening %s for writing: %s', NODE_PATH, e) return node_file_handle.write(mode_str) node_file_handle.close() def _write_touchpad_mode(mode_num): """ Look up the mode (by index) and write to node path. """ return _write_touchpad_mode_str(TOUCHPAD_MODES[mode_num])
gpl-2.0
4,583,050,605,971,774,500
31.664384
78
0.672678
false
3.561613
false
false
false
janschulz/igraph
interfaces/python/igraph/nexus.py
1
21903
# vim:ts=4:sw=4:sts=4:et # -*- coding: utf-8 -*- """Interface to the Nexus online graph repository. The classes in this file facilitate access to the Nexus online graph repository at U{http://nexus.igraph.org}. The main entry point of this package is the C{Nexus} variable, which is an instance of L{NexusConnection}. Use L{NexusConnection.get} to get a particular network from Nexus, L{NexusConnection.list} to list networks having a given set of tags, L{NexusConnection.search} to search in the dataset descriptions, or L{NexusConnection.info} to show the info sheet of a dataset.""" from cStringIO import StringIO from gzip import GzipFile from itertools import izip from textwrap import TextWrapper from urllib import urlencode from urlparse import urlparse, urlunparse from textwrap import TextWrapper from igraph.compat import property from igraph.configuration import Configuration from igraph.utils import multidict import re import urllib2 __all__ = ["Nexus", "NexusConnection"] __license__ = u"""\ Copyright (C) 2006-2012 Tamás Nepusz <[email protected]> Pázmány Péter sétány 1/a, 1117 Budapest, Hungary This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """ class NexusConnection(object): """Connection to a remote Nexus server. 
In most cases, you will not have to instantiate this object, just use the global L{Nexus} variable which is an instance of L{NexusConnection} and connects to the Nexus repository at U{http://nexus.igraph.org}. Example: >>> print Nexus.info("karate") # doctest:+ELLIPSIS Nexus dataset 'karate' (#1) vertices/edges: 34/78 name: Zachary's karate club tags: social network; undirected; weighted ... >>> karate = Nexus.get("karate") >>> from igraph import summary >>> summary(karate) IGRAPH UNW- 34 78 -- Zachary's karate club network + attr: Author (g), Citation (g), name (g), Faction (v), id (v), name (v), weight (e) @undocumented: _get_response, _parse_dataset_id, _parse_text_response, _ensure_uncompressed""" def __init__(self, nexus_url=None): """Constructs a connection to a remote Nexus server. @param nexus_url: the root URL of the remote server. Leave it at its default value (C{None}) unless you have set up your own Nexus server and you want to connect to that. C{None} fetches the URL from igraph's configuration file or uses the default URL if no URL is specified in the configuration file. """ self.debug = False self.url = nexus_url self._opener = urllib2.build_opener() def get(self, id): """Retrieves the dataset with the given ID from Nexus. Dataset IDs are formatted as follows: the name of a dataset on its own means that a single network should be returned if the dataset contains a single network, or multiple networks should be returned if the dataset contains multiple networks. When the name is followed by a dot and a network ID, only a single network will be returned: the one that has the given network ID. When the name is followed by a dot and a star, a dictionary mapping network IDs to networks will be returned even if the original dataset contains a single network only. E.g., getting C{"karate"} would return a single network since the Zachary karate club dataset contains one network only. 
Getting C{"karate.*"} on the other hand would return a dictionary with one entry that contains the Zachary karate club network. @param id: the ID of the dataset to retrieve. @return: an instance of L{Graph} (if a single graph has to be returned) or a dictionary mapping network IDs to instances of L{Graph}. """ from igraph import load dataset_id, network_id = self._parse_dataset_id(id) params = dict(format="Python-igraph", id=dataset_id) response = self._get_response("/api/dataset", params, compressed=True) response = self._ensure_uncompressed(response) result = load(response, format="pickle") if network_id is None: # If result contains a single network only, return that network. # Otherwise return the whole dictionary if not isinstance(result, dict): return result if len(result) == 1: return result[result.keys()[0]] return result if network_id == "*": # Return a dict no matter what if not isinstance(result, dict): result = dict(dataset_id=result) return result return result[network_id] def info(self, id): """Retrieves informations about the dataset with the given numeric or string ID from Nexus. @param id: the numeric or string ID of the dataset to retrieve. @return: an instance of L{NexusDatasetInfo}. """ params = dict(format="text", id=id) response = self._get_response("/api/dataset_info", params) return NexusDatasetInfo.FromMultiDict(self._parse_text_response(response)) def list(self, tags=None, operator="or", order="date"): """Retrieves a list of datasets matching a set of tags from Nexus. @param tags: the tags the returned datasets should have. C{None} retrieves all the datasets, a single string retrieves datasets having that given tag. Multiple tags may also be specified as a list, tuple or any other iterable. @param operator: when multiple tags are given, this argument specifies whether the retrieved datasets should match all the tags (C{"and"}) or any of them (C{"or"}). 
@param order: the order of entries; it must be one of C{"date"}, C{"name"} or C{"popularity"}. @return: a L{NexusDatasetInfoList} object, which basically acts like a list and yields L{NexusDatasetInfo} objects. The list is populated lazily; i.e. the requests will be fired only when needed. """ params = dict(format="text", order=order) if tags is not None: if not hasattr(tags, "__iter__") or isinstance(tags, basestring): params["tag"] = str(tags) else: params["tag"] = "|".join(str(tag) for tag in tags) params["operator"] = operator return NexusDatasetInfoList(self, "/api/dataset_info", params) def search(self, query, order="date"): """Retrieves a list of datasets matching a query string from Nexus. @param query: the query string. Searches are case insensitive and Nexus searches for complete words only. The special word OR can be used to find datasets that contain any of the given words (instead of all of them). Exact phrases must be enclosed in quotes in the search string. See the Nexus webpage for more information at U{http://nexus.igraph.org/web/docs#searching}. @param order: the order of entries; it must be one of C{"date"}, C{"name"} or C{"popularity"}. @return: a L{NexusDatasetInfoList} object, which basically acts like a list and yields L{NexusDatasetInfo} objects. The list is populated lazily; i.e. the requests will be fired only when needed. 
""" params = dict(q=query, order=order, format="text") return NexusDatasetInfoList(self, "/api/search", params) @staticmethod def _ensure_uncompressed(response): """Expects an HTTP response object, checks its Content-Encoding header, decompresses the data and returns an in-memory buffer holding the uncompressed data.""" compressed = response.headers.get("Content-Encoding") == "gzip" if not compressed: content_disp = response.headers.get("Content-Disposition", "") compressed = bool(re.match(r'attachment; *filename=.*\.gz\"?$', content_disp)) if compressed: return GzipFile(fileobj=StringIO(response.read()), mode="rb") print response.headers return response def _get_response(self, path, params={}, compressed=False): """Sends a request to Nexus at the given path with the given parameters and returns a file-like object for the response. `compressed` denotes whether we accept compressed responses.""" if self.url is None: url = Configuration.instance()["remote.nexus.url"] else: url = self.url url = "%s%s?%s" % (url, path, urlencode(params)) request = urllib2.Request(url) if compressed: request.add_header("Accept-Encoding", "gzip") if self.debug: print "[debug] Sending request: %s" % url return self._opener.open(request) @staticmethod def _parse_dataset_id(id): """Parses a dataset ID used in the `get` request. Returns the dataset ID and the network ID (the latter being C{None} if the original ID did not contain a network ID ). """ dataset_id, _, network_id = str(id).partition(".") if not network_id: network_id = None return dataset_id, network_id @staticmethod def _parse_text_response(response): """Parses a plain text formatted response from Nexus. Plain text formatted responses consist of key-value pairs, separated by C{":"}. Values may span multiple lines; in this case, the key is omitted after the first line and the extra lines start with whitespace. 
Examples: >>> d = Nexus._parse_text_response("Id: 17\\nName: foo") >>> sorted(d.items()) [('Id', '17'), ('Name', 'foo')] >>> d = Nexus._parse_text_response("Id: 42\\nName: foo\\n .\\n bar") >>> sorted(d.items()) [('Id', '42'), ('Name', 'foo\\n\\nbar')] """ if isinstance(response, basestring): response = response.split("\n") result = multidict() key, value = None, [] for line in response: line = line.rstrip() if not line: continue if key is not None and line[0] in ' \t': # Line continuation line = line.lstrip() if line == '.': line = '' value.append(line) else: # Key-value pair if key is not None: result.add(key, "\n".join(value)) key, value = line.split(":", 1) value = [value.strip()] if key is not None: result.add(key, "\n".join(value)) return result @property def url(self): """Returns the root URL of the Nexus repository the connection is communicating with.""" return self._url @url.setter def url(self, value): """Sets the root URL of the Nexus repository the connection is communicating with.""" if value is None: self._url = None else: value = str(value) parts = urlparse(value, "http", False) self._url = urlunparse(parts) if self._url and self._url[-1] == "/": self._url = self._url[:-1] class NexusDatasetInfo(object): """Information about a dataset in the Nexus repository. 
@undocumented: _update_from_multidict, vertices_edges""" def __init__(self, id=None, sid=None, name=None, networks=None, vertices=None, edges=None, tags=None, attributes=None, rest=None): self._conn = None self.id = id self.sid = sid self.name = name self.vertices = vertices self.edges = edges self.tags = tags self.attributes = attributes if networks is None: self.networks = [] elif not isinstance(networks, (str, unicode)): self.networks = list(networks) else: self.networks = [networks] if rest: self.rest = multidict(rest) else: self.rest = None @property def vertices_edges(self): if self.vertices is None or self.edges is None: return "" elif isinstance(self.vertices, (list, tuple)) and isinstance(self.edges, (list, tuple)): return " ".join("%s/%s" % (v,e) for v, e in izip(self.vertices, self.edges)) else: return "%s/%s" % (self.vertices, self.edges) @vertices_edges.setter def vertices_edges(self, value): if value is None: self.vertices, self.edges = None, None return value = value.strip().split(" ") if len(value) == 0: self.vertices, self.edges = None, None elif len(value) == 1: self.vertices, self.edges = map(int, value[0].split("/")) else: self.vertices = [] self.edges = [] for ve in value: v, e = ve.split("/", 1) self.vertices.append(int(v)) self.edges.append(int(e)) def __repr__(self): params = "(id=%(id)r, sid=%(sid)r, name=%(name)r, networks=%(networks)r, "\ "vertices=%(vertices)r, edges=%(edges)r, tags=%(tags)r, "\ "attributes=%(attributes)r, rest=%(rest)r)" % self.__dict__ return "%s%s" % (self.__class__.__name__, params) def __str__(self): if self.networks and len(self.networks) > 1: lines = ["Nexus dataset '%s' (#%s) with %d networks" % \ (self.sid, self.id, len(self.networks))] else: lines = ["Nexus dataset '%(sid)s' (#%(id)s)" % self.__dict__] lines.append("vertices/edges: %s" % self.vertices_edges) if self.name: lines.append("name: %s" % self.name) if self.tags: lines.append("tags: %s" % "; ".join(self.tags)) if self.rest: wrapper = 
TextWrapper(width=76, subsequent_indent=' ') keys = sorted(self.rest.iterkeys()) if "attribute" in self.rest: keys.remove("attribute") keys.append("attribute") for key in keys: for value in self.rest.getlist(key): paragraphs = str(value).splitlines() wrapper.initial_indent = "%s: " % key for paragraph in paragraphs: ls = wrapper.wrap(paragraph) if ls: lines.extend(wrapper.wrap(paragraph)) else: lines.append(" .") wrapper.initial_indent = " " return "\n".join(lines) def _update_from_multidict(self, params): """Updates the dataset object from a multidict representation of key-value pairs, similar to the ones provided by the Nexus API in plain text response.""" self.id = params.get("id") self.sid = params.get("sid") self.name = params.get("name") self.vertices = params.get("vertices") self.edges = params.get("edges") self.tags = params.get("tags") networks = params.get("networks") if networks: self.networks = networks.split() keys_to_ignore = set("id sid name vertices edges tags networks".split()) if self.vertices is None and self.edges is None: # Try "vertices/edges" self.vertices_edges = params.get("vertices/edges") keys_to_ignore.add("vertices/edges") if self.rest is None: self.rest = multidict() for k in set(params.iterkeys()) - keys_to_ignore: for v in params.getlist(k): self.rest.add(k, v) if self.id: self.id = int(self.id) if self.vertices and not isinstance(self.vertices, (list, tuple)): self.vertices = int(self.vertices) if self.edges and not isinstance(self.edges, (list, tuple)): self.edges = int(self.edges) if self.tags is not None: self.tags = self.tags.split(";") @classmethod def FromMultiDict(cls, dict): """Constructs a Nexus dataset object from a multidict representation of key-value pairs, similar to the ones provided by the Nexus API in plain text response.""" result = cls() result._update_from_multidict(dict) return result def download(self, network_id=None): """Retrieves the actual dataset from Nexus. 
@param network_id: if the dataset contains multiple networks, the ID of the network to be retrieved. C{None} returns a single network if the dataset contains a single network, or a dictionary of networks if the dataset contains more than one network. C{"*"} retrieves a dictionary even if the dataset contains a single network only. @return: a L{Graph} instance or a dictionary mapping network names to L{Graph} instances. """ if self.id is None: raise ValueError("dataset ID is empty") conn = self._conn or Nexus if network_id is None: return conn.get(self.id) return conn.get("%s.%s" % (self.id, network_id)) get = download class NexusDatasetInfoList(object): """A read-only list-like object that can be used to retrieve the items from a Nexus search result. """ def __init__(self, connection, method, params): """Constructs a Nexus dataset list that will use the given connection and the given parameters to retrieve the search results. @param connection: a Nexus connection object @param method: the URL of the Nexus API method to call @param params: the parameters to pass in the GET requests, in the form of a Python dictionary. 
""" self._conn = connection self._method = str(method) self._params = params self._length = None self._datasets = [] self._blocksize = 10 def _fetch_results(self, index): """Fetches the results from Nexus such that the result item with the given index will be available (unless the result list is shorter than the given index of course).""" # Calculate the start offset page = index // self._blocksize offset = page * self._blocksize self._params["offset"] = offset self._params["limit"] = self._blocksize # Ensure that self._datasets has the necessary length diff = (page+1) * self._blocksize - len(self._datasets) if diff > 0: self._datasets.extend([None] * diff) response = self._conn._get_response(self._method, self._params) current_dataset = None for line in response: key, value = line.strip().split(": ", 1) key = key.lower() if key == "totalsize": # Total number of items in the search result self._length = int(value) elif key == "id": # Starting a new dataset if current_dataset: self._datasets[offset] = current_dataset offset += 1 current_dataset = NexusDatasetInfo(id=int(value)) current_dataset._conn = self._conn elif key == "sid": current_dataset.sid = value elif key == "name": current_dataset.name = value elif key == "vertices": current_dataset.vertices = int(value) elif key == "edges": current_dataset.edges = int(value) elif key == "vertices/edges": current_dataset.vertices_edges = value elif key == "tags": current_dataset.tags = value.split(";") if current_dataset: self._datasets[offset] = current_dataset def __getitem__(self, index): if len(self._datasets) <= index: self._fetch_results(index) elif self._datasets[index] is None: self._fetch_results(index) return self._datasets[index] def __iter__(self): for i in xrange(len(self)): yield self[i] def __len__(self): """Returns the number of result items.""" if self._length is None: self._fetch_results(0) return self._length def __str__(self): """Converts the Nexus result list into a nice human-readable format.""" 
max_index_length = len(str(len(self))) + 2 indent = "\n" + " " * (max_index_length+1) result = [] for index, item in enumerate(self): formatted_item = ("[%d]" % index).rjust(max_index_length) + " " + \ str(item).replace("\n", indent) result.append(formatted_item) return "\n".join(result) Nexus = NexusConnection()
gpl-2.0
82,784,205,133,577,890
38.103571
96
0.59241
false
4.210152
false
false
false
jepcastelein/marketopy
marketo.py
1
9277
import requests import logging import time class MarketoClient: """Basic Marketo Client""" def __init__(self, identity, client_id, client_secret, api): self.api_endpoint = api self.identity_endpoint = identity self.client_id = client_id self.client_secret = client_secret self.api_version = "v1" self._fields = None self._session = requests.Session() self.refresh_auth_token() def refresh_auth_token(self): auth_url = "%s/oauth/token?grant_type=client_credentials" % ( self.identity_endpoint) auth_url += "&client_id=%s&client_secret=%s" % (self.client_id, self.client_secret) debug("Calling %s" % auth_url) r = requests.get(auth_url) r.raise_for_status() auth_data = r.json() log("Access token acquired: %s expiring in %s" % (auth_data['access_token'], auth_data['expires_in'])) self.auth_token = auth_data['access_token'] @property def fields(self): if self._fields is None: res = "leads/describe.json" fields = self.auth_get(res)["result"] fields = [f["rest"]["name"] for f in fields] self._fields = fields return self._fields def get_paging_token(self, since): """ Get a paging token. Format expeced: 2014-10-06. """ resource = "activities/pagingtoken.json" params = {"sinceDatetime": since} data = self.auth_get(resource, params) return data["nextPageToken"] def get_leadchanges(self, since, fields): """ Get lead changes. Params: fields = ["company", "score", "firstName"] """ return LeadChangeSet(self, since, fields, page_size=300) def get_lead_by_id(self, id, fields=None): """Get a lead by its ID""" resource = "lead/%i.json" % id data = self.auth_get(resource) return data def get_leads_by_id(self, ids, fields=None): params = {"filterType": "id", "filterValues": ",".join(ids), "fields": ",".join(fields) } resource = "leads.json" data = self.auth_get(resource, params=params) return data["result"] def query_leads(self, query, return_fields=None): """Query leads by any parameters. 
query: dict of fields / value to query on return fields: array of which fields should be requested from marketo """ resource = "leads.json" params = { "filterType": ",".join(query.keys()), "filterValues": ",".join(query.values())} if return_fields is not None: params["fields"] = return_fields data = self.auth_get(resource, params=params) return data["result"] def build_resource_url(self, resource): res_url = "%s/%s/%s" % (self.api_endpoint, self.api_version, resource) return res_url def auth_get(self, resource, params=[], page_size=None): """ Make an authenticated GET to Marketo, check success and return dict from json response. page_size: page size, max and default 300 """ headers = {"Authorization": "Bearer %s" % self.auth_token} if page_size is not None: params['batchSize'] = page_size res_url = self.build_resource_url(resource) r = self._session.get(res_url, headers=headers, params=params) r.raise_for_status() data = r.json() if data["success"] is False: err = data["errors"][0] raise Exception("Error %s - %s, calling %s" % (err["code"], err["message"], r.url)) time.sleep(20/80) return data class Lead(object): def __init__(self, client, id): self._client = client self._resource = "leads.json" self.id = id self._data_cache = None self._default_fields = None def __getattr__(self, name): log("Looking for %s" % name) if name not in self.fields: raise AttributeError if name in self._data: return self._data[name] elif name in self.fields: self._load_data(name) return self._data[name] else: raise AttributeError @property def fields(self): return self._client.fields @property def _data(self): if self._data_cache is None: if self._default_fields is not None: self._load_data(self._default_fields) else: self._load_data() return self._data_cache def _load_data(self, fields=None): "Load lead data for fields provided, or use default fields." 
resource = "leads/%s.json" % (self.id) params = {} if fields is not None: if type(fields) is str: fields = [fields] params = {"fields": ",".join(fields)} result = self._client.auth_get(resource, params)["result"][0] if self._data_cache is not None: newdata = self._data_cache.copy() newdata.update(result) self._data_cache = newdata else: self._data_cache = result class LeadChangeSet: """ REST Resource: activities/leadchanges.json Represent a set of changed leads, only taking into account changed leads, not new leads. TODO: handle new leads """ def __init__(self, client, since, fields, page_size): self.resource = "activities/leadchanges.json" self.client = client self.since = since self.fields = fields self.page_size = page_size self.has_more_result = False self.next_page_token = None self.changes = [] self.fetch_next_page() def __iter__(self): return self def __next__(self): if len(self.changes) == 0 and not self.has_more_result: raise StopIteration if len(self.changes) == 0 and self.has_more_result: self.fetch_next_page() return self.changes.pop(0) def fetch_next_page(self): debug("[mkto] Fetching next page for LeadChangeSet") if self.next_page_token is None: self.next_page_token = self.client.get_paging_token( since=self.since) params = { "fields": ','.join(self.fields), "nextPageToken": self.next_page_token} data = self.client.auth_get(self.resource, params, self.page_size) # If moreResult is true, set flag on object and next page token, if # not, reset them if data["moreResult"]: self.has_more_result = True self.next_page_token = data["nextPageToken"] else: self.has_more_result = False self.next_page_token = None for lead in self.prepare_results(data["result"]): self.changes.append(lead) def prepare_results(self, results): """ Iterates over change results and output an array with changed fields and values """ for c in results: changed_fields = {} changed_fields["id"] = c['leadId'] # if no fields updated -> new lead -> skip if len(c["fields"]) == 0: continue for 
f in c["fields"]: changed_fields[f["name"]] = f["newValue"] yield changed_fields class PagedMarketoResult: def __init__(self, client, resource, since, fields, page_size): self.resource = resource self.client = client self.since = since self.fields = fields self.page_size = page_size self.has_more_result = False self.next_page_token = None self.changes = [] self.fetch_next_page() def __iter__(self): return self def __next__(self): if len(self.changes) == 0 and not self.has_more_result: raise StopIteration if len(self.changes) == 0 and self.has_more_result: self.fetch_next_page() return self.changes.pop(0) def fetch_next_page(self): debug("fetching next page") if self.next_page_token is None: self.next_page_token = self.client.get_paging_token( since=self.since) params = { "fields": ','.join(self.fields), "nextPageToken": self.next_page_token} data = self.client.auth_get(self.resource, params, self.page_size) # If moreResult is true, set flag on object and next page token, if # not, reset them if data["moreResult"]: self.has_more_result = True self.next_page_token = data["nextPageToken"] else: self.has_more_result = False self.next_page_token = None for lead in self.prepare_results(data["result"]): self.changes.append(lead) def debug(msg): logger = logging.getLogger(__name__) logger.debug(msg) def log(msg): logger = logging.getLogger(__name__) logger.info(msg)
apache-2.0
-1,778,801,031,148,775,700
29.12013
78
0.555244
false
3.990108
false
false
false
walty8/trac
tracopt/ticket/deleter.py
1
7165
# -*- coding: utf-8 -*- # # Copyright (C) 2010 Edgewall Software # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://trac.edgewall.org/wiki/TracLicense. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://trac.edgewall.org/log/. from genshi.builder import tag from genshi.filters import Transformer from genshi.filters.transform import StreamBuffer from trac.attachment import Attachment from trac.core import Component, TracError, implements from trac.ticket.model import Ticket from trac.ticket.web_ui import TicketModule from trac.util import get_reporter_id from trac.util.datefmt import from_utimestamp from trac.util.presentation import captioned_button from trac.util.translation import _ from trac.web.api import IRequestFilter, IRequestHandler, ITemplateStreamFilter from trac.web.chrome import ITemplateProvider, add_notice, add_stylesheet class TicketDeleter(Component): """Ticket and ticket comment deleter. This component allows deleting ticket comments and complete tickets. For users having `TICKET_ADMIN` permission, it adds a "Delete" button next to each "Reply" button on the page. The button in the ticket description requests deletion of the complete ticket, and the buttons in the change history request deletion of a single comment. '''Comment and ticket deletion are irreversible (and therefore ''dangerous'') operations.''' For that reason, a confirmation step is requested. The confirmation page shows the ticket box (in the case of a ticket deletion) or the ticket change (in the case of a comment deletion). 
""" implements(ITemplateProvider, ITemplateStreamFilter, IRequestFilter, IRequestHandler) # ITemplateProvider methods def get_htdocs_dirs(self): return [] def get_templates_dirs(self): from pkg_resources import resource_filename return [resource_filename(__name__, 'templates')] # ITemplateStreamFilter methods def filter_stream(self, req, method, filename, stream, data): if filename not in ('ticket.html', 'ticket_preview.html'): return stream ticket = data.get('ticket') if not (ticket and ticket.exists and 'TICKET_ADMIN' in req.perm(ticket.resource)): return stream # Insert "Delete" buttons for ticket description and each comment def delete_ticket(): return tag.form( tag.div( tag.input(type='hidden', name='action', value='delete'), tag.input(type='submit', value=captioned_button(req, u'–', # 'EN DASH' _("Delete")), title=_('Delete ticket'), class_="trac-delete"), class_="inlinebuttons"), action='#', method='get') def delete_comment(): for event in buffer: cnum, cdate = event[1][1].get('id')[12:].split('-', 1) return tag.form( tag.div( tag.input(type='hidden', name='action', value='delete-comment'), tag.input(type='hidden', name='cnum', value=cnum), tag.input(type='hidden', name='cdate', value=cdate), tag.input(type='submit', value=captioned_button(req, u'–', # 'EN DASH' _("Delete")), title=_('Delete comment %(num)s', num=cnum), class_="trac-delete"), class_="inlinebuttons"), action='#', method='get') buffer = StreamBuffer() return stream | Transformer('//div[@class="description"]' '/h3[@id="comment:description"]') \ .after(delete_ticket).end() \ .select('//div[starts-with(@class, "change")]/@id') \ .copy(buffer).end() \ .select('//div[starts-with(@class, "change") and @id]' '//div[@class="trac-ticket-buttons"]') \ .append(delete_comment) # IRequestFilter methods def pre_process_request(self, req, handler): if handler is not TicketModule(self.env): return handler action = req.args.get('action') if action in ('delete', 'delete-comment'): return self else: return 
handler def post_process_request(self, req, template, data, content_type): return template, data, content_type # IRequestHandler methods def match_request(self, req): return False def process_request(self, req): id = int(req.args.get('id')) req.perm('ticket', id).require('TICKET_ADMIN') ticket = Ticket(self.env, id) action = req.args['action'] cnum = req.args.get('cnum') if req.method == 'POST': if 'cancel' in req.args: href = req.href.ticket(id) if action == 'delete-comment': href += '#comment:%s' % cnum req.redirect(href) if action == 'delete': ticket.delete() add_notice(req, _('The ticket #%(id)s has been deleted.', id=ticket.id)) req.redirect(req.href()) elif action == 'delete-comment': cdate = from_utimestamp(long(req.args.get('cdate'))) ticket.delete_change(cdate=cdate) add_notice(req, _('The ticket comment %(num)s on ticket ' '#%(id)s has been deleted.', num=cnum, id=ticket.id)) req.redirect(req.href.ticket(id)) tm = TicketModule(self.env) data = tm._prepare_data(req, ticket) tm._insert_ticket_data(req, ticket, data, get_reporter_id(req, 'author'), {}) data.update(action=action, cdate=None) if action == 'delete-comment': data['cdate'] = req.args.get('cdate') cdate = from_utimestamp(long(data['cdate'])) for change in data['changes']: if change.get('date') == cdate: data['change'] = change data['cnum'] = change.get('cnum') break else: raise TracError(_('Comment %(num)s not found', num=cnum)) elif action == 'delete': attachments = Attachment.select(self.env, ticket.realm, ticket.id) data.update(attachments=list(attachments)) add_stylesheet(req, 'common/css/ticket.css') return 'ticket_delete.html', data, None
bsd-3-clause
-3,726,745,054,534,510,600
40.155172
79
0.561933
false
4.409483
false
false
false
ggaughan/dee
darwen.py
1
3332
from Dee import Relation, Key, Tuple, QUOTA, MAX, MIN, IS_EMPTY, COUNT, GENERATE from DeeDatabase import Database class darwen_Database(Database): def __init__(self, name): """Define initial relvars and their initial values here (Called once on database creation)""" Database.__init__(self, name) if 'IS_CALLED' not in self: print "Adding IS_CALLED..." self.IS_CALLED = Relation(['StudentId', 'Name'], [('S1', 'Anne'), ('S2', 'Boris'), ('S3', 'Cindy'), ('S4', 'Devinder'), ('S5', 'Boris'), ] ) if 'IS_ENROLLED_ON' not in self: print "Adding IS_ENROLLED_ON..." self.IS_ENROLLED_ON = Relation(['StudentId', 'CourseId'], [('S1', 'C1'), ('S1', 'C2'), ('S2', 'C1'), ('S3', 'C3'), ('S4', 'C1'), ] ) if 'COURSE' not in self: print "Adding COURSE..." self.COURSE = Relation(['CourseId', 'Title'], [('C1', 'Database'), ('C2', 'HCI'), ('C3', 'Op Systems'), ('C4', 'Programming'), ] ) if 'EXAM_MARK' not in self: print "Adding EXAM_MARK..." self.EXAM_MARK = Relation(['StudentId', 'CourseId', 'Mark'], [('S1', 'C1', 85), ('S1', 'C2', 49), ('S2', 'C1', 49), ('S3', 'C3', 66), ('S4', 'C1', 93), ] ) def _vinit(self): """Define virtual relvars/relconsts (Called repeatedly, e.g. after database load from disk or commit) """ Database._vinit(self) if 'C_ER' not in self: print "Defining C_ER..." #this will always be the case, even when re-loading: we don't store relations with callable bodies self.C_ER = Relation(['CourseId', 'Exam_Result'], self.vC_ER, {'pk':(Key,['CourseId'])}) def vC_ER(self): return self.COURSE.extend(['Exam_Result'], lambda t:{'Exam_Result': (self.EXAM_MARK & GENERATE({'CourseId':t.CourseId}) )(['StudentId', 'Mark'])} )(['CourseId', 'Exam_Result']) #fixed #Load or create the database darwen = Database.open(darwen_Database, "darwen") ################################### if __name__=="__main__": print darwen.relations
mit
8,448,689,067,475,852,000
40.717949
112
0.344238
false
4.640669
false
false
false
diofeher/django-nfa
django/contrib/admin/widgets.py
1
8956
""" Form Widget classes specific to the Django admin site. """ import copy from django import newforms as forms from django.newforms.widgets import RadioFieldRenderer from django.newforms.util import flatatt from django.utils.datastructures import MultiValueDict from django.utils.text import capfirst, truncate_words from django.utils.translation import ugettext as _ from django.utils.safestring import mark_safe from django.utils.encoding import force_unicode from django.conf import settings class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the SelectFilter2.js library and its dependencies have been loaded in the HTML page. """ def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super(FilteredSelectMultiple, self).__init__(attrs, choices) def render(self, name, value, attrs=None, choices=()): from django.conf import settings output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)] output.append(u'<script type="text/javascript">addEvent(window, "load", function(e) {') # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. 
output.append(u'SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % \ (name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), settings.ADMIN_MEDIA_PREFIX)) return mark_safe(u''.join(output)) class AdminDateWidget(forms.TextInput): class Media: js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js", settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js") def __init__(self, attrs={}): super(AdminDateWidget, self).__init__(attrs={'class': 'vDateField', 'size': '10'}) class AdminTimeWidget(forms.TextInput): class Media: js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js", settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js") def __init__(self, attrs={}): super(AdminTimeWidget, self).__init__(attrs={'class': 'vTimeField', 'size': '8'}) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. forms.MultiWidget.__init__(self, widgets, attrs) def format_output(self, rendered_widgets): return mark_safe(u'<p class="datetime">%s %s<br />%s %s</p>' % \ (_('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1])) class AdminRadioFieldRenderer(RadioFieldRenderer): def render(self): """Outputs a <ul> for this set of radio fields.""" return mark_safe(u'<ul%s>\n%s\n</ul>' % ( flatatt(self.attrs), u'\n'.join([u'<li>%s</li>' % force_unicode(w) for w in self])) ) class AdminRadioSelect(forms.RadioSelect): renderer = AdminRadioFieldRenderer class AdminFileWidget(forms.FileInput): """ A FileField Widget that shows its current value if it has one. 
""" def __init__(self, attrs={}): super(AdminFileWidget, self).__init__(attrs) def render(self, name, value, attrs=None): from django.conf import settings output = [] if value: output.append('%s <a target="_blank" href="%s%s">%s</a> <br />%s ' % \ (_('Currently:'), settings.MEDIA_URL, value, value, _('Change:'))) output.append(super(AdminFileWidget, self).render(name, value, attrs)) return mark_safe(u''.join(output)) class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. """ def __init__(self, rel, attrs=None): self.rel = rel super(ForeignKeyRawIdWidget, self).__init__(attrs) def render(self, name, value, attrs=None): from django.conf import settings related_url = '../../../%s/%s/' % (self.rel.to._meta.app_label, self.rel.to._meta.object_name.lower()) if self.rel.limit_choices_to: url = '?' + '&amp;'.join(['%s=%s' % (k, v) for k, v in self.rel.limit_choices_to.items()]) else: url = '' if not attrs.has_key('class'): attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript looks for this hook. output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. output.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % \ (related_url, url, name)) output.append('<img src="%simg/admin/selector-search.gif" width="16" height="16" alt="Lookup" /></a>' % settings.ADMIN_MEDIA_PREFIX) if value: output.append(self.label_for_value(value)) return mark_safe(u''.join(output)) def label_for_value(self, value): return '&nbsp;<strong>%s</strong>' % \ truncate_words(self.rel.to.objects.get(pk=value), 14) class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids in the "raw_id" interface rather than in a <select multiple> box. 
""" def __init__(self, rel, attrs=None): super(ManyToManyRawIdWidget, self).__init__(rel, attrs) def render(self, name, value, attrs=None): attrs['class'] = 'vManyToManyRawIdAdminField' if value: value = ','.join([str(v) for v in value]) else: value = '' return super(ManyToManyRawIdWidget, self).render(name, value, attrs) def label_for_value(self, value): return '' def value_from_datadict(self, data, files, name): value = data.get(name, None) if value and ',' in value: return data[name].split(',') if value: return [value] return None def _has_changed(self, initial, data): if initial is None: initial = [] if data is None: data = [] if len(initial) != len(data): return True for pk1, pk2 in zip(initial, data): if force_unicode(pk1) != force_unicode(pk2): return True return False class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ def __init__(self, widget, rel, admin_site): self.is_hidden = widget.is_hidden self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj def render(self, name, value, *args, **kwargs): from django.conf import settings rel_to = self.rel.to related_url = '../../../%s/%s/' % (rel_to._meta.app_label, rel_to._meta.object_name.lower()) self.widget.choices = self.choices output = [self.widget.render(name, value, *args, **kwargs)] if rel_to in self.admin_site._registry: # If the related object has an admin interface: # TODO: "id_" is hard-coded here. This should instead use the correct # API to determine the ID dynamically. 
output.append(u'<a href="%sadd/" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % \ (related_url, name)) output.append(u'<img src="%simg/admin/icon_addlink.gif" width="10" height="10" alt="Add Another"/></a>' % settings.ADMIN_MEDIA_PREFIX) return mark_safe(u''.join(output)) def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def _has_changed(self, initial, data): return self.widget._has_changed(initial, data) def id_for_label(self, id_): return self.widget.id_for_label(id_)
bsd-3-clause
2,662,747,587,263,736,300
40.655814
146
0.621706
false
3.81431
false
false
false
chdb/DhammaMap
app/cryptoken.py
1
6464
#!/usr/bin/python # -*- coding: utf-8 -*- #from __future__ import unicode_literals import hashlib import hmac import os import json import utils as u import widget as W import logging from base64 import urlsafe_b64encode\ , urlsafe_b64decode class Base64Error (Exception): '''invalid Base64 character or incorrect padding''' def decodeToken (token, expected): try: td = _decode (token) valid, expired = td.valid (expected) if valid: if expected == 'session': td.data['_ts'] = td.timeStamp return td.data, expired except Base64Error: logging.warning ('invalid Base64 in %s Token: %r', type, token) except: logging.exception('unexpected exception decoding %s token : %r', type, token) return None, False def encodeVerifyToken (data, tt): # tt = _tokenType (tt) assert tt in ['signUp' ,'pw1' ,'pw2' ], 'invalid TokenType: %s' % tt return _encode (tt, data) def encodeSessionToken (ssn):#, user=None): data = dict(ssn) if '_userID' in ssn: return _encode ('auth', data) return _encode ('anon', data) TokenTypes = ( 'anon' , 'auth' , 'signUp' , 'pw1' ) def _tokenTypeCode (tt): return TokenTypes.index(tt) def _tokenType (code): return TokenTypes [code] #......................................... class _TokenData (object): def __init__ (_s, token, tt, obj, bM, ts): _s.badMac = bM _s.tokenType = tt _s.timeStamp = ts _s.token = token _s.data = obj def maxAge (_s): if _s.tokenType =='auth' : return u.config('maxIdleAuth') elif _s.tokenType =='signUp': return u.config('maxAgeSignUpTok') elif _s.tokenType =='pw1' : return u.config('maxAgePasswordTok') else: raise RuntimeError ('invalid token type') def valid (_s, expected): """ Checks encryption validity and expiry: whether the token is younger than maxAge seconds. Use neutral evaluation pathways to beat timing attacks. NB: return only success or failure - log shows why it failed but user mustn't know ! 
""" if expected == 'session': badType = (_s.tokenType != 'anon' and _s.tokenType != 'auth') else: badType = _s.tokenType != expected if _s.tokenType == 'anon': expired = False else: expired = not u.validTimeStamp (_s.timeStamp, _s.maxAge()) badData = _s.data is None # and (type(_s.data) == dict) isValid = False # check booleans in order of their initialisation if _s.badMac: x ='Invalid MAC' elif badType: x ='Invalid token type:{} expected:{}'.format(_s.tokenType, expected) elif badData: x ='Invalid data object' else: isValid = True if expired: logging.debug ('Token expired: %r', _s.token) #no warning log if merely expired if not isValid: logging.warning ('%s in Token: %r', x, _s.token) return isValid, expired #......................................... # Some global constants to hold the lengths of component substrings of the token CH = 1 TS = 4 UID = 8 MAC = 20 def _hash (msg, ts): """hmac output of sha1 is 20 bytes irrespective of msg length""" k = W.W.keys (ts) return hmac.new (k, msg, hashlib.sha1).digest() def _serialize (data): '''Generic data is stored in the token. The data could be a dict or any other serialisable type. However the data size is limited because currently it all goes into one cookie and there is a max cookie size for some browsers so we place a limit in session.save() ''' # ToDo: replace json with binary protocol cpickle # ToDo compression of data thats too long to fit otherwise: # data = json.encode (data) # if len(data) > data_max: # 4K minus the other fields # level = (len(data) - data_max) * K # experiment! or use level = 9 # data = zlib.compress( data, level) # if len(data) > data_max: # assert False, 'oh dear!' todo - save some? 
data in datastore # return data, True # return data, False # todo: encode a boolean in kch to indicate whether compressed #logging.debug ('serializing data = %r', data) s = json.dumps (data, separators=(',',':')) #logging.debug('serialized data: %r', s) return s.encode('utf-8') #byte str def _deserialize (data): try: # logging.debug('data1: %r', data) obj = json.loads (data) # logging.debug('obj: %r', obj) return obj # byteify(obj) except Exception, e: logging.exception(e) return None def _encode (tokentype, obj): """ obj is serializable session data returns a token string of base64 chars with iv and encrypted tokentype and data """ tt = _tokenTypeCode (tokentype) logging.debug ('encode tokentype = %r tt = %r',tokentype, tt) now = u.sNow() #logging.debug ('encode tokentype = %r tt = %r',tokentype, tt) data = W._iB.pack (now, tt) # ts + tt data += _serialize (obj) # ts + tt + data h20 = _hash (data, now) return urlsafe_b64encode (data + h20) # ts + tt + data + mac def _decode (token): """inverse of encode: return _TokenData""" try: bytes = urlsafe_b64decode (token) # ts + tt + data + mac except TypeError: logging.warning('Base64 Error: token = %r', token) logging.exception('Base64 Error: ') raise Base64Error ts, tt = W._iB.unpack_from (bytes) ttype = _tokenType (tt) #logging.debug ('decode tokentype = %r tt = %r token = %s',ttype, tt, token) preDataLen = TS+CH data = bytes[ :-MAC] mac1 = bytes[-MAC: ] mac2 = _hash (data, ts) badMac = not u.sameStr (mac1, mac2) data = _deserialize (data [preDataLen: ]) # logging.debug('data: %r', data) return _TokenData (token, ttype, data, badMac, ts)
mit
-6,609,240,872,610,904,000
35.942857
100
0.550124
false
3.815821
false
false
false
SciLifeLab/bcbio-nextgen
bcbio/rnaseq/count.py
1
12286
""" count number of reads mapping to features of transcripts """ import os import sys import itertools # soft imports try: import HTSeq import pandas as pd import gffutils except ImportError: HTSeq, pd, gffutils = None, None, None from bcbio.utils import file_exists from bcbio.distributed.transaction import file_transaction from bcbio.log import logger from bcbio import bam import bcbio.pipeline.datadict as dd def _get_files(data): mapped = bam.mapped(data["work_bam"], data["config"]) in_file = bam.sort(mapped, data["config"], order="queryname") gtf_file = dd.get_gtf_file(data) work_dir = dd.get_work_dir(data) out_dir = os.path.join(work_dir, "htseq-count") sample_name = dd.get_sample_name(data) out_file = os.path.join(out_dir, sample_name + ".counts") stats_file = os.path.join(out_dir, sample_name + ".stats") return in_file, gtf_file, out_file, stats_file def invert_strand(iv): iv2 = iv.copy() if iv2.strand == "+": iv2.strand = "-" elif iv2.strand == "-": iv2.strand = "+" else: raise ValueError("Illegal strand") return iv2 class UnknownChrom(Exception): pass def _get_stranded_flag(data): strand_flag = {"unstranded": "no", "firststrand": "reverse", "secondstrand": "yes"} stranded = dd.get_strandedness(data, "unstranded").lower() assert stranded in strand_flag, ("%s is not a valid strandedness value. " "Valid values are 'firststrand', 'secondstrand', " "and 'unstranded") return strand_flag[stranded] def htseq_count(data): """ adapted from Simon Anders htseq-count.py script http://www-huber.embl.de/users/anders/HTSeq/doc/count.html """ sam_filename, gff_filename, out_file, stats_file = _get_files(data) stranded = _get_stranded_flag(data["config"]) overlap_mode = "union" feature_type = "exon" id_attribute = "gene_id" minaqual = 0 if file_exists(out_file): return out_file logger.info("Counting reads mapping to exons in %s using %s as the " "annotation and strandedness as %s." 
% (os.path.basename(sam_filename), os.path.basename(gff_filename), dd.get_strandedness(data))) features = HTSeq.GenomicArrayOfSets("auto", stranded != "no") counts = {} # Try to open samfile to fail early in case it is not there open(sam_filename).close() gff = HTSeq.GFF_Reader(gff_filename) i = 0 try: for f in gff: if f.type == feature_type: try: feature_id = f.attr[id_attribute] except KeyError: sys.exit("Feature %s does not contain a '%s' attribute" % (f.name, id_attribute)) if stranded != "no" and f.iv.strand == ".": sys.exit("Feature %s at %s does not have strand " "information but you are running htseq-count " "in stranded mode. Use '--stranded=no'." % (f.name, f.iv)) features[f.iv] += feature_id counts[f.attr[id_attribute]] = 0 i += 1 if i % 100000 == 0: sys.stderr.write("%d GFF lines processed.\n" % i) except: sys.stderr.write("Error occured in %s.\n" % gff.get_line_number_string()) raise sys.stderr.write("%d GFF lines processed.\n" % i) if len(counts) == 0: sys.stderr.write("Warning: No features of type '%s' found.\n" % feature_type) try: align_reader = htseq_reader(sam_filename) first_read = iter(align_reader).next() pe_mode = first_read.paired_end except: sys.stderr.write("Error occured when reading first line of sam " "file.\n") raise try: if pe_mode: read_seq_pe_file = align_reader read_seq = HTSeq.pair_SAM_alignments(align_reader) empty = 0 ambiguous = 0 notaligned = 0 lowqual = 0 nonunique = 0 i = 0 for r in read_seq: i += 1 if not pe_mode: if not r.aligned: notaligned += 1 continue try: if r.optional_field("NH") > 1: nonunique += 1 continue except KeyError: pass if r.aQual < minaqual: lowqual += 1 continue if stranded != "reverse": iv_seq = (co.ref_iv for co in r.cigar if co.type == "M" and co.size > 0) else: iv_seq = (invert_strand(co.ref_iv) for co in r.cigar if co.type == "M" and co.size > 0) else: if r[0] is not None and r[0].aligned: if stranded != "reverse": iv_seq = (co.ref_iv for co in r[0].cigar if co.type == "M" and co.size > 0) else: 
iv_seq = (invert_strand(co.ref_iv) for co in r[0].cigar if co.type == "M" and co.size > 0) else: iv_seq = tuple() if r[1] is not None and r[1].aligned: if stranded != "reverse": iv_seq = itertools.chain(iv_seq, (invert_strand(co.ref_iv) for co in r[1].cigar if co.type == "M" and co.size > 0)) else: iv_seq = itertools.chain(iv_seq, (co.ref_iv for co in r[1].cigar if co.type == "M" and co.size > 0)) else: if (r[0] is None) or not (r[0].aligned): notaligned += 1 continue try: if (r[0] is not None and r[0].optional_field("NH") > 1) or \ (r[1] is not None and r[1].optional_field("NH") > 1): nonunique += 1 continue except KeyError: pass if (r[0] and r[0].aQual < minaqual) or (r[1] and r[1].aQual < minaqual): lowqual += 1 continue try: if overlap_mode == "union": fs = set() for iv in iv_seq: if iv.chrom not in features.chrom_vectors: raise UnknownChrom for iv2, fs2 in features[iv].steps(): fs = fs.union(fs2) elif (overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty"): fs = None for iv in iv_seq: if iv.chrom not in features.chrom_vectors: raise UnknownChrom for iv2, fs2 in features[iv].steps(): if (len(fs2) > 0 or overlap_mode == "intersection-strict"): if fs is None: fs = fs2.copy() else: fs = fs.intersection(fs2) else: sys.exit("Illegal overlap mode.") if fs is None or len(fs) == 0: empty += 1 elif len(fs) > 1: ambiguous += 1 else: counts[list(fs)[0]] += 1 except UnknownChrom: if not pe_mode: rr = r else: rr = r[0] if r[0] is not None else r[1] empty += 1 if i % 100000 == 0: sys.stderr.write("%d sam %s processed.\n" % (i, "lines " if not pe_mode else "line pairs")) except: if not pe_mode: sys.stderr.write("Error occured in %s.\n" % read_seq.get_line_number_string()) else: sys.stderr.write("Error occured in %s.\n" % read_seq_pe_file.get_line_number_string()) raise sys.stderr.write("%d sam %s processed.\n" % (i, "lines " if not pe_mode else "line pairs")) with file_transaction(data, out_file) as tmp_out_file: with open(tmp_out_file, "w") as 
out_handle: on_feature = 0 for fn in sorted(counts.keys()): on_feature += counts[fn] out_handle.write("%s\t%d\n" % (fn, counts[fn])) with file_transaction(data, stats_file) as tmp_stats_file: with open(tmp_stats_file, "w") as out_handle: out_handle.write("on_feature\t%d\n" % on_feature) out_handle.write("no_feature\t%d\n" % empty) out_handle.write("ambiguous\t%d\n" % ambiguous) out_handle.write("too_low_aQual\t%d\n" % lowqual) out_handle.write("not_aligned\t%d\n" % notaligned) out_handle.write("alignment_not_unique\t%d\n" % nonunique) return out_file def combine_count_files(files, out_file=None, ext=".fpkm"): """ combine a set of count files into a single combined file """ assert all([file_exists(x) for x in files]), \ "Some count files in %s do not exist." % files for f in files: assert file_exists(f), "%s does not exist or is empty." % f col_names = [os.path.basename(x.split(ext)[0]) for x in files] if not out_file: out_dir = os.path.join(os.path.dirname(files[0])) out_file = os.path.join(out_dir, "combined.counts") if file_exists(out_file): return out_file df = pd.io.parsers.read_table(f, sep="\t", index_col=0, header=None, names=[col_names[0]]) for i, f in enumerate(files): if i == 0: df = pd.io.parsers.read_table(f, sep="\t", index_col=0, header=None, names=[col_names[0]]) else: df = df.join(pd.io.parsers.read_table(f, sep="\t", index_col=0, header=None, names=[col_names[i]])) df.to_csv(out_file, sep="\t", index_label="id") return out_file def annotate_combined_count_file(count_file, gtf_file, out_file=None): dbfn = gtf_file + ".db" if not file_exists(dbfn): return None if not gffutils: return None db = gffutils.FeatureDB(dbfn, keep_order=True) if not out_file: out_dir = os.path.dirname(count_file) out_file = os.path.join(out_dir, "annotated_combined.counts") # if the genes don't have a gene_id or gene_name set, bail out try: symbol_lookup = {f['gene_id'][0]: f['gene_name'][0] for f in db.features_of_type('exon')} except KeyError: return None df = 
pd.io.parsers.read_table(count_file, sep="\t", index_col=0, header=0) df['symbol'] = df.apply(lambda x: symbol_lookup.get(x.name, ""), axis=1) df.to_csv(out_file, sep="\t", index_label="id") return out_file def htseq_reader(align_file): """ returns a read-by-read sequence reader for a BAM or SAM file """ if bam.is_sam(align_file): read_seq = HTSeq.SAM_Reader(align_file) elif bam.is_bam(align_file): read_seq = HTSeq.BAM_Reader(align_file) else: logger.error("%s is not a SAM or BAM file" % (align_file)) sys.exit(1) return read_seq
mit
-7,574,525,932,074,615,000
36.006024
108
0.47664
false
3.859881
false
false
false
hotdoc/hotdoc_gi_extension
setup.py
1
1887
# -*- coding: utf-8 -*- # # Copyright © 2015,2016 Mathieu Duponchelle <[email protected]> # Copyright © 2015,2016 Collabora Ltd # # This library is free software; you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with this library. If not, see <http://www.gnu.org/licenses/>. import os from setuptools import setup, find_packages with open(os.path.join('hotdoc_gi_extension', 'VERSION.txt'), 'r') as _: VERSION = _.read().strip() setup( name = "hotdoc_gi_extension", version = VERSION, keywords = "gobject-introspection C hotdoc", url='https://github.com/hotdoc/hotdoc_gi_extension', author_email = '[email protected]', license = 'LGPLv2.1+', description = "An extension for hotdoc that parses gir files", author = "Mathieu Duponchelle", packages = find_packages(), package_data = { '': ['*.html'], 'hotdoc_gi_extension': ['VERSION.txt'], 'hotdoc_gi_extension.transition_scripts': ['translate_sections.sh'], }, scripts=['hotdoc_gi_extension/transition_scripts/hotdoc_gtk_doc_porter', 'hotdoc_gi_extension/transition_scripts/hotdoc_gtk_doc_scan_parser'], entry_points = {'hotdoc.extensions': 'get_extension_classes = hotdoc_gi_extension.gi_extension:get_extension_classes'}, install_requires = [ 'lxml', 'pyyaml', ], )
lgpl-2.1
-3,706,372,489,089,509,000
35.960784
123
0.69443
false
3.58365
false
false
false
pugpe/pugpe
apps/cert/management/commands/send_certificates.py
1
2215
# -*- coding: utf-8 -*- import traceback from datetime import timedelta from django.core import mail from django.core.mail import EmailMultiAlternatives, mail_admins from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.utils.translation import ugettext_lazy as _ from django.contrib.sites.models import Site from django.conf import settings from django.utils import translation from django.utils import timezone from cert.models import Attendee class Command(BaseCommand): help = u'Send certificate e-mails' def get_email(self, attendee): translation.activate(settings.LANGUAGE_CODE) subject = _(u'Certificado de participação | PUG-PE') from_email = settings.DEFAULT_FROM_EMAIL ctx = { 'site': Site.objects.get_current().domain, 'event': attendee.event, 'attendee': attendee, } text_content = render_to_string('cert/cert_email.txt', ctx) html_content = render_to_string('cert/cert_email.html', ctx) msg = EmailMultiAlternatives( subject, text_content, from_email, [attendee.email], ) msg.attach_alternative(html_content, "text/html") return msg def handle(self, *args, **options): connection = mail.get_connection() num_emails = 0 attendees = Attendee.objects.filter(sent_date__isnull=True) # Evitar envio para eventos muito antigos attendees = attendees.filter( pub_date__gte=timezone.now() - timedelta(days=10), ) for attendee in attendees: msg = self.get_email(attendee) try: num_emails += connection.send_messages([msg]) except Exception as exc: subject = _(u'PUG-PE: Problema envio certificado') body = 'except: '.format(exc) body += traceback.format_exc() mail_admins(subject, body) else: attendee.sent_date = timezone.now() attendee.save() self.stdout.write( unicode(_(u'Foram enviados {0} emails\n'.format(num_emails))), )
mit
7,008,236,078,179,404,000
31.072464
74
0.623588
false
3.980216
false
false
false
kodat/odoo-module-template
odoo_module_template/model.py
1
1936
# -*- coding: utf-8 -*- # Bashir Idirs (Alsuty) # Copyright (C) 2016. # # This Code is free: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from openerp import models,fields class FirstModel(models.Model): _name= 'template.firstmodel' image = fields.Binary('Image') name = fields.Char('Name', required=True) select_field = fields.Selection(string="Type", selection=[('type1', 'Type1'), ('type2', 'Type2'), ('type3', 'Type3'),], required=True) boolean_field = fields.Boolean('Check') integer_field = fields.Integer('Integer Number') float_field = fields.Float('Float Value') many2one_field = fields.Many2one('template.secondmodel', 'Many2one') many2many_ids = fields.Many2many('template.thirdmodel', 'many2many_relation', 'firstmodel_id', 'thirdmodel_id', string='Many2many') ony2many_fields = fields.One2many('template.forthmodel', 'firstmodel_id', string='One2many') class SecondModel(models.Model): _name = 'template.secondmodel' name = fields.Char('Name') class ThirdModel(models.Model): _name = 'template.thirdmodel' name = fields.Char('Name') class ForthModel(models.Model): _name = 'template.forthmodel' name = fields.Char('Name') firstmodel_id= fields.Many2one('template.firstmodel')
gpl-3.0
7,697,741,544,575,525,000
30.813559
77
0.681818
false
3.355286
false
false
false
sixfeetup/cloud-custodian
c7n/resources/waf.py
1
1475
# Copyright 2016-2017 Capital One Services, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function, unicode_literals from c7n.manager import resources from c7n.query import QueryResourceManager @resources.register('waf') class WAF(QueryResourceManager): class resource_type(object): service = "waf" enum_spec = ("list_web_acls", "WebACLs", None) detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL") name = "Name" id = "WebACLId" dimension = "WebACL" filter_name = None @resources.register('waf-regional') class RegionalWAF(QueryResourceManager): class resource_type(object): service = "waf-regional" enum_spec = ("list_web_acls", "WebACLs", None) detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL") name = "Name" id = "WebACLId" dimension = "WebACL" filter_name = None
apache-2.0
-2,341,126,569,578,517,500
33.302326
82
0.684068
false
3.70603
false
false
false
tuomas2/serviceform
serviceform/serviceform/models/participation.py
1
4015
# -*- coding: utf-8 -*- # (c) 2017 Tuomas Airaksinen # # This file is part of Serviceform. # # Serviceform is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Serviceform is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Serviceform. If not, see <http://www.gnu.org/licenses/>. from typing import Sequence, TYPE_CHECKING from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.functional import cached_property from .. import utils if TYPE_CHECKING: from .people import Participant class ParticipantLog(models.Model): created_at = models.DateTimeField(auto_now_add=True) participant = models.ForeignKey('serviceform.Participant', on_delete=models.CASCADE) writer_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) writer_id = models.PositiveIntegerField() # Can be either responsible or django user written_by = GenericForeignKey('writer_type', 'writer_id') message = models.TextField() class ParticipationActivity(models.Model): class Meta: unique_together = (('participant', 'activity'),) ordering = ( 'activity__category__category__order', 'activity__category__order', 'activity__order',) participant = models.ForeignKey('serviceform.Participant', on_delete=models.CASCADE) activity = models.ForeignKey('serviceform.Activity', on_delete=models.CASCADE) additional_info = models.CharField(max_length=1024, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, null=True) @cached_property def 
cached_participant(self) -> 'Participant': return utils.get_participant(self.participant_id) def __str__(self): return '%s for %s' % (self.activity, self.participant) @property def choices(self) -> 'Sequence[ParticipationActivityChoice]': return self.choices_set.select_related('activity_choice') @property def additional_info_display(self) -> str: return self.additional_info or '-' class ParticipationActivityChoice(models.Model): class Meta: unique_together = (('activity', 'activity_choice'),) ordering = ('activity_choice__order',) activity = models.ForeignKey(ParticipationActivity, related_name='choices_set', on_delete=models.CASCADE) activity_choice = models.ForeignKey('serviceform.ActivityChoice', on_delete=models.CASCADE) additional_info = models.CharField(max_length=1024, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, null=True) @cached_property def cached_participant(self) -> 'Participant': return utils.get_participant(self.activity.participant_id) def __str__(self): return '%s for %s' % (self.activity_choice, self.activity.participant) @property def additional_info_display(self) -> str: return self.additional_info or '-' class QuestionAnswer(models.Model): participant = models.ForeignKey('serviceform.Participant', on_delete=models.CASCADE) question = models.ForeignKey('serviceform.Question', on_delete=models.CASCADE) answer = models.TextField() created_at = models.DateTimeField(auto_now_add=True, null=True) class Meta: ordering = ('question__order',) @cached_property def cached_participant(self) -> 'Participant': return utils.get_participant(self.participant_id) def __str__(self): return '%s: %s' % (self.question.question, self.answer)
gpl-3.0
4,722,200,843,626,066,000
37.615385
95
0.714072
false
3.98709
false
false
false
thermokarst/qiime2
qiime2/core/type/tests/test_parse.py
1
4541
# ---------------------------------------------------------------------------- # Copyright (c) 2016-2020, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest from qiime2.core.type.parse import ast_to_type, string_to_ast from qiime2.core.testing.type import Foo, Bar, C1, C2 from qiime2.plugin import (Int, Float, Str, Bool, Range, Choices, TypeMap, Properties, List, Set, Visualization, Metadata, MetadataColumn, Categorical, Numeric) class TestParsing(unittest.TestCase): def assert_roundtrip(self, type): ast = string_to_ast(repr(type)) type1 = ast_to_type(ast) type2 = ast_to_type(type1.to_ast()) self.assertEqual(type, type1) self.assertEqual(ast, type1.to_ast()) self.assertEqual(type1, type2) def test_simple_semantic_type(self): self.assert_roundtrip(Foo) self.assert_roundtrip(Bar) self.assert_roundtrip(C1[Foo]) def test_union_semantic_type(self): self.assert_roundtrip(Foo | Bar) self.assert_roundtrip(C1[Foo | Bar]) def test_complicated_semantic_type(self): self.assert_roundtrip(C2[C1[Foo % Properties(["A", "B"]) | Bar], Foo % Properties("A") ] % Properties(exclude=["B", "C"])) def test_collection_semantic_type(self): self.assert_roundtrip(List[Foo | Bar]) self.assert_roundtrip(Set[Bar]) def test_visualization(self): self.assert_roundtrip(Visualization) def test_primitive_simple(self): self.assert_roundtrip(Int) self.assert_roundtrip(Float) self.assert_roundtrip(Str) self.assert_roundtrip(Bool) def test_primitive_predicate(self): self.assert_roundtrip(Int % Range(0, 10)) self.assert_roundtrip( Int % (Range(0, 10) | Range(50, 100, inclusive_end=True))) self.assert_roundtrip(Float % Range(None, 10)) self.assert_roundtrip(Float % Range(0, None)) self.assert_roundtrip(Str % Choices("A")) self.assert_roundtrip(Str % Choices(["A"])) self.assert_roundtrip(Str % 
Choices("A", "B")) self.assert_roundtrip(Str % Choices(["A", "B"])) self.assert_roundtrip(Bool % Choices(True)) self.assert_roundtrip(Bool % Choices(False)) def test_collection_primitive(self): self.assert_roundtrip(Set[Str % Choices('A', 'B', 'C')]) self.assert_roundtrip(List[Int % Range(1, 3, inclusive_end=True) | Str % Choices('A', 'B', 'C')]) def test_metadata_primitive(self): self.assert_roundtrip(Metadata) self.assert_roundtrip(MetadataColumn[Numeric]) self.assert_roundtrip(MetadataColumn[Categorical]) self.assert_roundtrip(MetadataColumn[Numeric | Categorical]) def test_typevars(self): T, U, V, W, X = TypeMap({ (Foo, Bar, Str % Choices('A', 'B')): (C1[Foo], C1[Bar]), (Foo | Bar, Foo, Str): (C1[Bar], C1[Foo]) }) scope = {} T1 = ast_to_type(T.to_ast(), scope=scope) U1 = ast_to_type(U.to_ast(), scope=scope) V1 = ast_to_type(V.to_ast(), scope=scope) W1 = ast_to_type(W.to_ast(), scope=scope) X1 = ast_to_type(X.to_ast(), scope=scope) self.assertEqual(len(scope), 1) self.assertEqual(scope[id(T.mapping)], [T1, U1, V1, W1, X1]) self.assertEqual(T1.mapping.lifted, T.mapping.lifted) self.assertIs(T1.mapping, U1.mapping) self.assertIs(U1.mapping, V1.mapping) self.assertIs(V1.mapping, W1.mapping) self.assertIs(W1.mapping, X1.mapping) def test_syntax_error(self): with self.assertRaisesRegex(ValueError, "could not be parsed"): string_to_ast('$') def test_bad_juju(self): with self.assertRaisesRegex(ValueError, "one type expression"): string_to_ast('import os; os.rmdir("something-important")') def test_more_bad(self): with self.assertRaisesRegex(ValueError, "Unknown expression"): string_to_ast('lambda x: x') def test_weird(self): with self.assertRaisesRegex(ValueError, "Unknown literal"): string_to_ast('FeatureTable(Foo + Bar)') if __name__ == '__main__': unittest.main()
bsd-3-clause
2,467,362,676,587,686,400
36.841667
78
0.586214
false
3.644462
true
false
false
owwlo/Courier
src/courier/app/CourierService.py
1
5234
''' Created on Jan 17, 2015 @author: owwlo ''' from PyQt5 import QtGui, QtCore, QtQml, QtQuick from PyQt5.QtCore import QObject, QUrl, Qt, QVariant, QMetaObject, Q_ARG import threading import websocket import json import logging from time import sleep import coloredlogs WS_URL = "ws://localhost:8888/computer" RECONNECT_INTERVAL = 5 logger = logging.getLogger("CourierApp") coloredlogs.install(level = logging.DEBUG, show_hostname = False, show_timestamps = False) class CourierService(threading.Thread, QObject): class WebSocketHandler(): def __init__(self, service): self.__service = service def onMessage(self, ws, message): self.__service.onMessage(message) def onError(self, ws, error): logger.debug("onError " + str(error)) def onClose(self, ws): logger.debug("onCLose") self.__service.ws = None def onOpen(self, ws): logger.debug("onOpen") self.__service.ws = ws self.__service.token = None fetchThread = threading.Thread(target=self.__service.fetchToken) fetchThread.start() # fetchThread.join() onTokenFetched = QtCore.pyqtSignal([str]) onNewMessage = QtCore.pyqtSignal([dict]) def __init__(self, app): threading.Thread.__init__(self) QObject.__init__(self, app) self.__app = app self.handler = self.WebSocketHandler(self) self.token = None # Initialize callback lists for self.__callbacksOnNewMessageFromDevice = [] self.__callbacksOnTokenFetched = [] self.__callbacksOnDeviceConnected = [] def run(self): while(True): ws = websocket.WebSocketApp(WS_URL, on_message=self.handler.onMessage, on_error=self.handler.onError, on_close=self.handler.onClose, on_open=self.handler.onOpen) ws.run_forever() logger.error("Lost connection, will try again in %d seconds." % RECONNECT_INTERVAL) sleep(RECONNECT_INTERVAL) def fetchToken(self): MAX_RETRY_CNT = 5 cnt = MAX_RETRY_CNT while cnt > 0 and self.token == None: if cnt != MAX_RETRY_CNT: logger.warn( "Connect failed, reconnecting... 
trying count remains: %d" % cnt) self.sendHash(self.getTokenRequestPackage()) sleep(5) cnt -= 1 if self.token == None: logger.error("Cannot connect to server") # else: # self.on def getTokenRequestPackage(self): return {"type": "operation", "command": "request_token"} def getReplyRequestPackage(self, cId, replyText): return {"type": "reply", "cId": str(cId), "content": replyText} def sendReply(self, cId, replyText): pkg = self.getReplyRequestPackage(cId, replyText) self.sendHash(pkg) def parseMessage(self, message): parsed = None try: parsed = json.loads(message) except Exception as e: logger.warn(str(e)) return None return parsed def sendHash(self, h): if self.token: h["token"] = self.token j = json.dumps(h) self.send(j) def send(self, message): if self.ws != None: self.ws.send(message) else: logger.error("Socket Failed.") def onMessage(self, message): logger.debug("Raw Message from Server: " + message) msg = self.parseMessage(message) if msg == None: return mtype = msg["type"] if mtype == "new_msg": self.onNewMessageFromDevice(msg) elif mtype == "token_response": self.onTokenResponse(msg) elif mtype == "info_paired": self.onDeviceConnected(msg) def onTokenResponse(self, message): logger.debug("Get token from server: " + message["token"]) self.token = message["token"] for fn in self.__callbacksOnTokenFetched: fn(self.token) self.onTokenFetched.emit(self.token) def onNewMessageFromDevice(self, message): for fn in self.__callbacksOnNewMessageFromDevice: fn(message) self.onNewMessage.emit(message) def onDeviceConnected(self, message): for fn in self.__callbacksOnDeviceConnected: fn(message) def addOnNewMessageFromDevice(self, callback): self.__callbacksOnNewMessageFromDevice.append(callback) def removeOnNewMessageFromDevice(self, callback): self.__callbacksOnNewMessageFromDevice.remove(callback) def addOnTokenFetched(self, callback): self.__callbacksOnTokenFetched.append(callback) def removeOnTokenFetched(self, callback): 
self.__callbacksOnTokenFetched.remove(callback) def addOnDeviceConnected(self, callback): self.__callbacksOnDeviceConnected.append(callback) def removeOnDeviceConnected(self, callback): self.__callbacksOnDeviceConnected.remove(callback)
mit
-3,827,367,293,656,535,000
30.341317
95
0.597058
false
4.137549
false
false
false
carlthome/librosa
librosa/feature/utils.py
1
8078
#!/usr/bin/env python # -*- coding: utf-8 -*- """Feature manipulation utilities""" from warnings import warn import numpy as np import scipy.signal from .._cache import cache from ..util.exceptions import ParameterError __all__ = ['delta', 'stack_memory'] @cache(level=40) def delta(data, width=9, order=1, axis=-1, mode='interp', **kwargs): r'''Compute delta features: local estimate of the derivative of the input data along the selected axis. Delta features are computed Savitsky-Golay filtering. Parameters ---------- data : np.ndarray the input data matrix (eg, spectrogram) width : int, positive, odd [scalar] Number of frames over which to compute the delta features. Cannot exceed the length of `data` along the specified axis. If `mode='interp'`, then `width` must be at least `data.shape[axis]`. order : int > 0 [scalar] the order of the difference operator. 1 for first derivative, 2 for second, etc. axis : int [scalar] the axis along which to compute deltas. Default is -1 (columns). mode : str, {'interp', 'nearest', 'mirror', 'constant', 'wrap'} Padding mode for estimating differences at the boundaries. kwargs : additional keyword arguments See `scipy.signal.savgol_filter` Returns ------- delta_data : np.ndarray [shape=(d, t)] delta matrix of `data` at specified order Notes ----- This function caches at level 40. 
See Also -------- scipy.signal.savgol_filter Examples -------- Compute MFCC deltas, delta-deltas >>> y, sr = librosa.load(librosa.util.example_audio_file()) >>> mfcc = librosa.feature.mfcc(y=y, sr=sr) >>> mfcc_delta = librosa.feature.delta(mfcc) >>> mfcc_delta array([[ 1.666e+01, 1.666e+01, ..., 1.869e-15, 1.869e-15], [ 1.784e+01, 1.784e+01, ..., 6.085e-31, 6.085e-31], ..., [ 7.262e-01, 7.262e-01, ..., 9.259e-31, 9.259e-31], [ 6.578e-01, 6.578e-01, ..., 7.597e-31, 7.597e-31]]) >>> mfcc_delta2 = librosa.feature.delta(mfcc, order=2) >>> mfcc_delta2 array([[ -1.703e+01, -1.703e+01, ..., 3.834e-14, 3.834e-14], [ -1.108e+01, -1.108e+01, ..., -1.068e-30, -1.068e-30], ..., [ 4.075e-01, 4.075e-01, ..., -1.565e-30, -1.565e-30], [ 1.676e-01, 1.676e-01, ..., -2.104e-30, -2.104e-30]]) >>> import matplotlib.pyplot as plt >>> plt.subplot(3, 1, 1) >>> librosa.display.specshow(mfcc) >>> plt.title('MFCC') >>> plt.colorbar() >>> plt.subplot(3, 1, 2) >>> librosa.display.specshow(mfcc_delta) >>> plt.title(r'MFCC-$\Delta$') >>> plt.colorbar() >>> plt.subplot(3, 1, 3) >>> librosa.display.specshow(mfcc_delta2, x_axis='time') >>> plt.title(r'MFCC-$\Delta^2$') >>> plt.colorbar() >>> plt.tight_layout() >>> plt.show() ''' data = np.atleast_1d(data) if mode == 'interp' and width > data.shape[axis]: raise ParameterError("when mode='interp', width={} " "cannot exceed data.shape[axis]={}".format(width, data.shape[axis])) if width < 3 or np.mod(width, 2) != 1: raise ParameterError('width must be an odd integer >= 3') if order <= 0 or not isinstance(order, int): raise ParameterError('order must be a positive integer') kwargs.pop('deriv', None) kwargs.setdefault('polyorder', order) return scipy.signal.savgol_filter(data, width, deriv=order, axis=axis, mode=mode, **kwargs) @cache(level=40) def stack_memory(data, n_steps=2, delay=1, **kwargs): """Short-term history embedding: vertically concatenate a data vector or matrix with delayed copies of itself. 
Each column `data[:, i]` is mapped to:: data[:, i] -> [data[:, i], data[:, i - delay], ... data[:, i - (n_steps-1)*delay]] For columns `i < (n_steps - 1) * delay` , the data will be padded. By default, the data is padded with zeros, but this behavior can be overridden by supplying additional keyword arguments which are passed to `np.pad()`. Parameters ---------- data : np.ndarray [shape=(t,) or (d, t)] Input data matrix. If `data` is a vector (`data.ndim == 1`), it will be interpreted as a row matrix and reshaped to `(1, t)`. n_steps : int > 0 [scalar] embedding dimension, the number of steps back in time to stack delay : int != 0 [scalar] the number of columns to step. Positive values embed from the past (previous columns). Negative values embed from the future (subsequent columns). kwargs : additional keyword arguments Additional arguments to pass to `np.pad`. Returns ------- data_history : np.ndarray [shape=(m * d, t)] data augmented with lagged copies of itself, where `m == n_steps - 1`. Notes ----- This function caches at level 40. 
Examples -------- Keep two steps (current and previous) >>> data = np.arange(-3, 3) >>> librosa.feature.stack_memory(data) array([[-3, -2, -1, 0, 1, 2], [ 0, -3, -2, -1, 0, 1]]) Or three steps >>> librosa.feature.stack_memory(data, n_steps=3) array([[-3, -2, -1, 0, 1, 2], [ 0, -3, -2, -1, 0, 1], [ 0, 0, -3, -2, -1, 0]]) Use reflection padding instead of zero-padding >>> librosa.feature.stack_memory(data, n_steps=3, mode='reflect') array([[-3, -2, -1, 0, 1, 2], [-2, -3, -2, -1, 0, 1], [-1, -2, -3, -2, -1, 0]]) Or pad with edge-values, and delay by 2 >>> librosa.feature.stack_memory(data, n_steps=3, delay=2, mode='edge') array([[-3, -2, -1, 0, 1, 2], [-3, -3, -3, -2, -1, 0], [-3, -3, -3, -3, -3, -2]]) Stack time-lagged beat-synchronous chroma edge padding >>> y, sr = librosa.load(librosa.util.example_audio_file()) >>> chroma = librosa.feature.chroma_stft(y=y, sr=sr) >>> tempo, beats = librosa.beat.beat_track(y=y, sr=sr, hop_length=512) >>> beats = librosa.util.fix_frames(beats, x_min=0, x_max=chroma.shape[1]) >>> chroma_sync = librosa.util.sync(chroma, beats) >>> chroma_lag = librosa.feature.stack_memory(chroma_sync, n_steps=3, ... mode='edge') Plot the result >>> import matplotlib.pyplot as plt >>> beat_times = librosa.frames_to_time(beats, sr=sr, hop_length=512) >>> librosa.display.specshow(chroma_lag, y_axis='chroma', x_axis='time', ... 
x_coords=beat_times) >>> plt.yticks([0, 12, 24], ['Lag=0', 'Lag=1', 'Lag=2']) >>> plt.title('Time-lagged chroma') >>> plt.colorbar() >>> plt.tight_layout() >>> plt.show() """ if n_steps < 1: raise ParameterError('n_steps must be a positive integer') if delay == 0: raise ParameterError('delay must be a non-zero integer') data = np.atleast_2d(data) t = data.shape[1] kwargs.setdefault('mode', 'constant') if kwargs['mode'] == 'constant': kwargs.setdefault('constant_values', [0]) # Pad the end with zeros, which will roll to the front below if delay > 0: padding = (int((n_steps - 1) * delay), 0) else: padding = (0, int((n_steps - 1) * -delay)) data = np.pad(data, [(0, 0), padding], **kwargs) history = data # TODO: this could be more efficient for i in range(1, n_steps): history = np.vstack([np.roll(data, -i * delay, axis=1), history]) # Trim to original width if delay > 0: history = history[:, :t] else: history = history[:, -t:] # Make contiguous return np.asfortranarray(history)
isc
-6,802,198,681,692,537,000
30.678431
97
0.551498
false
3.286412
false
false
false
chewxy/cu
cmd/gencudnn/parse.py
1
3705
from bs4 import BeautifulSoup import requests import re import sys import os inputs ={} outputs = {} ios = {} docs = {} def get(): if os.path.isfile("cache/docs.html"): with open("cache/docs.html", 'r') as f: print("Using cache", file=sys.stderr) return f.read() r = requests.get("http://docs.nvidia.com/deeplearning/sdk/cudnn-developer-guide/index.html") with open("cache/docs.html", 'w') as f: f.write(r.text) return r.text def main(): txt = get() soup = BeautifulSoup(txt, "html5lib") contents = soup.find_all(id="api-introduction") topics = contents[0].find_all(class_="topic concept nested1") for topic in topics: rawFnName = topic.find_all(class_='title topictitle2')[0].text try: fnName = re.search('cudnn.+$', rawFnName).group(0) except AttributeError as e: print("rawFnName: {}".format(rawFnName), file=sys.stderr) continue try: paramsDL = topic.find_all(class_='dl')[0] # first definition list is params except IndexError: print("rawFnName: {} - topic has no dl class".format(fnName), file=sys.stderr) continue # check previous if paramsDL.previous_sibling.previous_sibling.text != "Parameters": print("rawFnName: {} has no params::: {}".format(fnName, paramsDL.previous_sibling), file=sys.stderr) continue params = paramsDL.find_all(class_='dt dlterm') # name paramsDesc = paramsDL.find_all(class_='dd') # use type paramUse = [] for d in paramsDesc: try: use = d.find_all(class_='ph i')[0].text except IndexError as e: use = "Input" paramUse.append(use) if len(params) != len(paramUse): print("rawFnName: {} - differing params and use cases".format(fnName), file=sys.stderr) continue inputParams = [p.text.strip() for i, p in enumerate(params) if (paramUse[i].strip()=='Input') or (paramUse[i].strip()=="Inputs")] outputParams = [p.text.strip() for i, p in enumerate(params) if (paramUse[i].strip()=='Output') or (paramUse[i].strip()=="Outputs")] ioParams = [p.text.strip() for i, p in enumerate(params) if paramUse[i].strip()=='Input/Output'] inputs[fnName] = inputParams outputs[fnName] 
= outputParams ios[fnName] = ioParams # extract docs try: docbody = topic.find_all(class_='body conbody')[0] except IndexError: print("fnName: {} - no body".format(fnName), file=sys.stderr) continue # clear is better than clever. doc = docbody.find_all("p")[0].text doc = doc.replace("\n", "") doc = re.sub("\t+", " ", doc) doc = re.sub("\s+", " ", doc) doc = doc.replace('"', '`') doc = doc.replace("This function", fnName) doc = doc.replace("This routine", fnName) doc = doc.replace("This", fnName) doc = doc.strip() docs[fnName] = doc # write the go file print("package main") print("var inputParams = map[string][]string{") for k, v in inputs.items(): if len(v) == 0: continue print('"{}": {{ '.format(k), end="") for inp in v : split = inp.split(",") for s in split: print('"{}", '.format(s.strip()), end="") print("},") print("}") print("var outputParams = map[string][]string{") for k, v in outputs.items(): if len(v) == 0: continue print('"{}": {{ '.format(k), end="") for inp in v : split = inp.split(",") for s in split: print('"{}", '.format(s.strip()), end="") print("},") print("}") print("var ioParams = map[string][]string{") for k, v in ios.items(): if len(v) == 0: continue print('"{}": {{ '.format(k), end="") for inp in v : split = inp.split(",") for s in split: print('"{}", '.format(s.strip()), end="") print("},") print("}") print("var docs = map[string]string{") for k, v in docs.items(): print('"{}": "{}",'.format(k, v.strip())) print("}") main()
mit
-7,812,344,624,569,286,000
28.412698
134
0.623212
false
2.899061
false
false
false
MortalViews/python-notes
inheritance.py
1
1192
import random class Person: def __init__(self,name,age,location): self.name = name self.age = age self.locaiton = location def is_sick(self): return random.randint(1,10)%2==0 class AttendenceMixin: def swip_in(self): pass def swip_out(self): pass class Employee(Person): def __init__(self,emp_id,joining_date,*args,**kwargs): self.emp_id =emp_id self.joining_date =joining_date super().__init__(*args,**kwargs) class Contractor(Employee): pass class InfraEmployee(Employee,AttendenceMixin): def __init__(self,dept,*args,**kwargs): self.dept = dept super().__init__(*args,**kwargs) class ITEmployee(Employee,AttendenceMixin): def __init__(self,project,technologies,system_id,*args,**kwargs): self.project =project self.tech = technologies self.system = system_id super().__init__(*args,**kwargs) def is_sick(self): return random.randint(1,10)%2==1 class Manager(Employee): def __init__(self,cabin_no,*args,**kwargs): self.cabin=cabin_no super().__init__(*args,**kwargs)
apache-2.0
4,436,098,363,554,478,600
26.090909
69
0.589765
false
3.547619
false
false
false
lutris/website
games/notifier.py
1
1688
"""Send a digest of unpublished content to moderators""" from django.conf import settings from accounts.models import User from games import models from emails.messages import send_email DEFAULT_COUNT = 12 def get_unpublished_installers(count=DEFAULT_COUNT): """Return a random list of unpublished installers""" return models.Installer.objects.filter(published=False).order_by('?')[:count] def get_unpublished_screenshots(count=DEFAULT_COUNT): """Return a random list of unpublished screenshots""" return models.Screenshot.objects.filter(published=False).order_by('?')[:count] def get_unreviewed_game_submissions(count=DEFAULT_COUNT): """Return a random list of unreviewed game submissions""" return models.GameSubmission.objects.filter( accepted_at__isnull=True ).order_by('?')[:count] def get_installer_issues(count=DEFAULT_COUNT): """Return a random list of installer issues""" return models.InstallerIssue.objects.all().order_by('?')[:count] def get_mod_mail_content(): """Get the payload to be included in the digest""" return { 'installers': get_unpublished_installers(), 'screenshots': get_unpublished_screenshots(), 'submissions': get_unreviewed_game_submissions(), 'issues': get_installer_issues() } def send_daily_mod_mail(): """Send the email to moderators""" context = get_mod_mail_content() if settings.DEBUG: moderators = [u[1] for u in settings.MANAGERS] else: moderators = [u.email for u in User.objects.filter(is_staff=True)] subject = 'Your daily moderator mail' return send_email('daily_mod_mail', context, subject, moderators)
agpl-3.0
-6,707,878,073,285,417,000
32.76
82
0.702607
false
3.751111
false
false
false
cloud-ark/cloudark
server/common/fm_logger.py
1
2400
import inspect import logging from server.common import constants class Logging(object): def __init__(self): logging.basicConfig(filename=constants.LOG_FILE_NAME, level=logging.DEBUG, filemode='a', format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') self.logger = logging.getLogger("CloudARK") # http://stackoverflow.com/questions/10973362/python-logging-function-name-file-name-line-number-using-a-single-file def info(self, message): # Get the previous frame in the stack, otherwise it would # be this function!!! try: func = inspect.currentframe().f_back.f_code # Dump the message + the name of this function to the log. self.logger.info("<%s>: %s() %s:%i" % ( message, func.co_name, func.co_filename, func.co_firstlineno )) except IOError as e: if e.errno == 28: print("-- Disk full -- (most likely this also won't get printed.") def debug(self, message): # Get the previous frame in the stack, otherwise it would # be this function!!! try: func = inspect.currentframe().f_back.f_code # Dump the message + the name of this function to the log. self.logger.debug("<%s>: %s() %s:%i" % ( message, func.co_name, func.co_filename, func.co_firstlineno )) except IOError as e: if e.errno == 28: print("-- Disk full -- (most likely this also won't get printed.") def error(self, message): # Get the previous frame in the stack, otherwise it would # be this function!!! try: func = inspect.currentframe().f_back.f_code # Dump the message + the name of this function to the log. self.logger.error("<%s>: %s() %s:%i" % ( message, func.co_name, func.co_filename, func.co_firstlineno )) self.logger.error(message, exc_info=1) except IOError as e: if e.errno == 28: print("-- Disk full -- (most likely this also won't get printed.")
apache-2.0
-4,898,865,385,617,483,000
34.294118
120
0.515417
false
4.339964
false
false
false
azvoleff/chitwanabm
chitwanabm/modelloop.py
1
18907
# Copyright 2008-2013 Alex Zvoleff # # This file is part of the chitwanabm agent-based model. # # chitwanabm is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # chitwanabm is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # chitwanabm. If not, see <http://www.gnu.org/licenses/>. # # See the README.rst file for author contact information. """ Contains main model loop: Contains the main loop for the model. Takes input parameters read from runmodel.py, and passes results of model run back. """ from __future__ import division import os import time import copy import logging import numpy as np from pyabm.file_io import write_NBH_shapefile from pyabm.utility import TimeSteps from chitwanabm import rc_params from chitwanabm import test logger = logging.getLogger(__name__) rcParams = rc_params.get_params() timebounds = rcParams['model.timebounds'] timestep = rcParams['model.timestep'] model_time = TimeSteps(timebounds, timestep) def main_loop(world, results_path): """This function contains the main model loop. 
Passed to it is a list of regions, which contains the person, household, and neighborhood agents to be used in the model, and the land-use parameters.""" if rcParams['run_validation_checks']: if not test.validate_person_attributes(world): logger.critical("Person attributes validation failed") if not test.validate_household_attributes(world): logger.critical("Household attributes validation failed") if not test.validate_neighborhood_attributes(world): logger.critical("Neighborhood attributes validation failed") time_strings = {} # Store the date values (as timestep number (0), float and date string) # for time zero (T0) so that the initial values of the model (which are for # time zero, the zeroth timestep) can be used in plotting model results. time_strings['timestep'] = [0] time_strings['time_float'] = [model_time.get_T0_date_float()] time_strings['time_date'] = [model_time.get_T0_date_string()] # Keep annual totals to print while the model is running annual_num_marr = 0 annual_num_divo = 0 annual_num_births = 0 annual_num_deaths = 0 annual_num_out_migr_LL_indiv = 0 annual_num_ret_migr_LL_indiv = 0 annual_num_out_migr_LD_indiv = 0 annual_num_ret_migr_LD_indiv = 0 annual_num_in_migr_HH = 0 annual_num_out_migr_HH = 0 # Save the starting time of the model to use in printing elapsed time while # it runs. modelrun_starttime = time.time() def write_results_CSV(world, results_path, timestep): """ Function to periodically save model results to CSV (if this option is selected in the rc file). 
""" if rcParams['save_psn_data']: world.write_persons_to_csv(timestep, results_path) if rcParams['save_NBH_data']: world.write_NBHs_to_csv(timestep, results_path) if rcParams['save_LULC_shapefiles']: NBH_shapefile = os.path.join(results_path, "NBHs_time_%s.shp"%timestep) neighborhoods = [] regions = world.get_regions() for region in regions: neighborhoods.extend(region.get_agents()) file_io.write_NBH_shapefile(neighborhoods, NBH_shapefile) # TODO: The below is still a work in progress # def write_results_netcdf(world, results_path, timestep): # if rcParams['save_psn_data_netcdf']: # world.write_persons_to_netcdf(timestep, results_path) # Write the results for timestep 0 write_results_CSV(world, results_path, 0) # saved_data will store event, population, and fuelwood usage data keyed by # timestep:variable:nbh. saved_data = {} # Save the initialization data for timestep 0 (note that all the event # variables, like new_births, new_deaths, etc., need to be set to None # in each neighborhood, for each variable, as they are unknown for timestep # 0 (since the model has not yet begun). Need to construct an empty_events # dictionary to initialize these events for timestep 0. # TODO: Fix this to work for multiple regions. 
region = world.get_regions()[0] empty_events = {} EVIs = {} for neighborhood in region.iter_agents(): empty_events[neighborhood.get_ID()] = np.NaN EVIs[neighborhood.get_ID()] = neighborhood._EVI saved_data[0] = {} saved_data[0]['EVI'] = EVIs saved_data[0]['births'] = empty_events saved_data[0]['deaths'] = empty_events saved_data[0]['marr'] = empty_events saved_data[0]['divo'] = empty_events saved_data[0]['out_migr_LL_indiv'] = empty_events saved_data[0]['ret_migr_LL_indiv'] = empty_events saved_data[0]['out_migr_LD_indiv'] = empty_events saved_data[0]['ret_migr_LD_indiv'] = empty_events saved_data[0]['in_migr_HH'] = empty_events saved_data[0]['out_migr_HH'] = empty_events saved_data[0].update(region.get_neighborhood_pop_stats()) saved_data[0].update(region.get_neighborhood_fw_usage(model_time.get_T0_date_float())) ########################################################################### # Define the result arrays - there will be three arrays stored in a # dictionary: # 1) timesteps stores the output # 2) nbh stores neighborhood level output # 3) psn stores person level output results_new_format = {} timesteps_dtype = [('timestep', 'i2'), ('year', 'i2'), ('month', 'i2'), ('date_float', 'f4')] results_new_format['timesteps'] = np.zeros((model_time.get_total_num_timesteps()), dtype=timesteps_dtype) #TODO: Finish this nbh_dtype = [('EVI', 'f4'), ('births', 'i2'), ('deaths', 'i2'), ('marr', 'i2'), ('divo', 'i2'), ('out_migr_LL_indiv', 'i2'), ('ret_migr_LL_indiv', 'i2'), ('out_migr_LD_indiv', 'i2'), ('ret_migr_LD_indiv', 'i2'), ('in_migr_HH', 'i2'), ('out_migr_HH', 'i2'), ('num_psn', 'i4'), ('num_hs', 'i2'), ('num_marr', 'i2')] results_new_format['nbh'] = np.zeros((region.num_members() * model_time.get_total_num_timesteps()), dtype=nbh_dtype) #TODO: Finish this psn_dtype = [('births', 'i2'), ('deaths', 'i2'), ('marr', 'i2'), ('divo', 'i2'), ('out_migr_LL_indiv', 'i2'), ('ret_migr_LL_indiv', 'i2'), ('out_migr_LD_indiv', 'i2'), ('ret_migr_LD_indiv', 'i2'), ('in_migr_HH', 
'i2'), ('out_migr_HH', 'i2'), ('num_psn', 'i4'), ('num_hs', 'i2'), ('num_marr', 'i2')] results_new_format['psn'] = np.zeros((model_time.get_total_num_timesteps()), dtype=psn_dtype) # Make a dictionary to store empty (zero) event data for submodels if they # are turned off by the user. zero_events = {} for neighborhood in region.iter_agents(): zero_events[neighborhood.get_ID()] = 0 # "Burn in" by running the model for three years in simulated mode, where # age isn't incremented, but migrations occur. This allows starting the # model with realistic migration histories, avoiding a huge loss of # population to migration in the first month of the model. logger.info('Burning in events for region %s'%region.get_ID()) for neg_timestep in xrange(-rcParams['model.burnin_timesteps'], 0): for region in world.iter_regions(): if rcParams['submodels.migration_LL_individual']: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = region.individual_LL_migrations(model_time.get_T_minus_date_float(neg_timestep), neg_timestep, BURN_IN=True) else: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = zero_events, zero_events if rcParams['submodels.migration_LD_individual']: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = region.individual_LD_migrations(model_time.get_T_minus_date_float(neg_timestep), neg_timestep, BURN_IN=True) else: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = zero_events, zero_events if rcParams['submodels.fertility']: new_births = region.births(model_time.get_cur_date_float(), model_time.get_cur_int_timestep(), simulate=True) else: new_births = zero_events num_new_births = sum(new_births.values()) num_new_out_migr_LL_indiv = sum(new_out_migr_LL_indiv.values()) num_new_ret_migr_LL_indiv = sum(new_ret_migr_LL_indiv.values()) num_new_out_migr_LD_indiv = sum(new_out_migr_LD_indiv.values()) num_new_ret_migr_LD_indiv = sum(new_ret_migr_LD_indiv.values()) logger.info("Burn in %3s: P: %5s NOLL: %3s NRLL: %3s NOLD: %3s NRLD: %3s NB: %3s"%(neg_timestep, region.num_persons(), 
num_new_out_migr_LL_indiv, num_new_ret_migr_LL_indiv, num_new_out_migr_LD_indiv, num_new_ret_migr_LD_indiv, num_new_births)) while model_time.in_bounds(): timestep = model_time.get_cur_int_timestep() results_new_format['timesteps'][timestep - 1] = (timestep, model_time.get_cur_year(), model_time.get_cur_month(), model_time.get_cur_date_float()) logger.debug('beginning timestep %s (%s)'%(model_time.get_cur_int_timestep(), model_time.get_cur_date_string())) if model_time.get_cur_month() == 1: annual_num_births = 0 annual_num_deaths = 0 annual_num_marr = 0 annual_num_divo = 0 annual_num_out_migr_LL_indiv = 0 annual_num_ret_migr_LL_indiv = 0 annual_num_out_migr_LD_indiv = 0 annual_num_ret_migr_LD_indiv = 0 annual_num_in_migr_HH = 0 annual_num_out_migr_HH = 0 for region in world.iter_regions(): logger.debug('processing region %s'%region.get_ID()) # This could easily handle multiple regions, although currently # there is only one, for all of Chitwan. if rcParams['submodels.fertility']: new_births = region.births(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_births = zero_events if rcParams['submodels.mortality']: new_deaths = region.deaths(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_deaths = zero_events if rcParams['submodels.marriage']: new_marr = region.marriages(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_marr = zero_events if rcParams['submodels.divorce']: new_divo = region.divorces(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_divo = zero_events if rcParams['submodels.migration_LL_individual']: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = region.individual_LL_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_out_migr_LL_indiv, new_ret_migr_LL_indiv = zero_events, zero_events if rcParams['submodels.migration_LD_individual']: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = 
region.individual_LD_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_out_migr_LD_indiv, new_ret_migr_LD_indiv = zero_events, zero_events if rcParams['submodels.migration_household']: new_in_migr_HH, new_out_migr_HH = region.household_migrations(model_time.get_cur_date_float(), model_time.get_cur_int_timestep()) else: new_in_migr_HH, new_out_migr_HH = zero_events, zero_events if rcParams['submodels.schooling']: schooling = region.education(model_time.get_cur_date_float()) else: schooling = zero_events region.increment_age() # Now account for changing NFOs (if desired) if rcParams['NFOs.change.model'].lower() != 'none': region.establish_NFOs() # Save event, LULC, and population data in the saved_data dictionary # for later output to CSV. saved_data[timestep] = {} saved_data[timestep]['EVI'] = EVIs saved_data[timestep]['births'] = new_births saved_data[timestep]['deaths'] = new_deaths saved_data[timestep]['marr'] = new_marr saved_data[timestep]['divo'] = new_divo saved_data[timestep]['out_migr_LL_indiv'] = new_out_migr_LL_indiv saved_data[timestep]['ret_migr_LL_indiv'] = new_ret_migr_LL_indiv saved_data[timestep]['out_migr_LD_indiv'] = new_out_migr_LD_indiv saved_data[timestep]['ret_migr_LD_indiv'] = new_ret_migr_LD_indiv saved_data[timestep]['in_migr_HH'] = new_in_migr_HH saved_data[timestep]['out_migr_HH'] = new_out_migr_HH saved_data[timestep].update(region.get_neighborhood_pop_stats()) saved_data[timestep].update(region.get_neighborhood_fw_usage(model_time.get_cur_date_float())) saved_data[timestep].update(region.get_neighborhood_landuse()) saved_data[timestep].update(region.get_neighborhood_nfo_context()) saved_data[timestep].update(region.get_neighborhood_forest_distance()) # Keep running totals of events for printing results: num_new_births = sum(new_births.values()) num_new_deaths = sum(new_deaths.values()) num_new_marr = sum(new_marr.values()) num_new_divo = sum(new_divo.values()) num_new_out_migr_LL_indiv = 
sum(new_out_migr_LL_indiv.values()) num_new_ret_migr_LL_indiv = sum(new_ret_migr_LL_indiv.values()) num_new_out_migr_LD_indiv = sum(new_out_migr_LD_indiv.values()) num_new_ret_migr_LD_indiv = sum(new_ret_migr_LD_indiv.values()) num_new_in_migr_HH = sum(new_in_migr_HH.values()) num_new_out_migr_HH = sum(new_out_migr_HH.values()) annual_num_births += num_new_births annual_num_deaths += num_new_deaths annual_num_marr += num_new_marr annual_num_divo += num_new_divo annual_num_out_migr_LL_indiv += num_new_out_migr_LL_indiv annual_num_ret_migr_LL_indiv += num_new_ret_migr_LL_indiv annual_num_out_migr_LD_indiv += num_new_out_migr_LD_indiv annual_num_ret_migr_LD_indiv += num_new_ret_migr_LD_indiv annual_num_in_migr_HH += num_new_in_migr_HH annual_num_out_migr_HH += num_new_out_migr_HH # Print an information line to allow keeping tabs on the model while it # is running. num_persons = region.num_persons() num_households = region.num_households() stats_string = "%s: P: %5s TMa: %5s THH: %5s NMa: %3s NDv: %3s NB: %3s ND: %3s NOLL: %3s NRLL: %3s NOLD: %3s NRLD: %3s NOMH: %3s NIMH: %3s"%( model_time.get_cur_date_string().ljust(7), num_persons, region.get_num_marriages(), num_households, num_new_marr, num_new_divo, num_new_births, num_new_deaths, num_new_out_migr_LL_indiv, num_new_ret_migr_LL_indiv, num_new_out_migr_LD_indiv, num_new_ret_migr_LD_indiv, num_new_out_migr_HH, num_new_in_migr_HH) logger.info('%s'%stats_string) # Save timestep, year and month, and time_float values for use in # storing results (to CSV) keyed to a particular timestep. time_strings['timestep'].append(model_time.get_cur_int_timestep()) time_strings['time_float'].append(model_time.get_cur_date_float()) time_strings['time_date'].append(model_time.get_cur_date_string()) if model_time.get_cur_month() == 12 or model_time.is_last_iteration() \ and model_time.get_cur_date() != model_time._starttime: # Model this years agricultural productivity, to be used in the # next year's model runs. 
EVIs = region.agricultural_productivity() mean_NBH_EVI = np.mean(EVIs.values()) mean_Valley_EVI = region._Valley_Mean_EVI # The last condition in the above if statement is necessary as # there is no total to print on the first timestep, so it wouldn't # make sense to print it. total_string = "%s totals: New Ma: %3s Dv: %3s B: %3s D: %3s LLOutMi: %3s LLRetMi: %3s LDOutMi: %3s LDRetMi: %3s OutMiHH: %3s InMiHH: %3s | NBHEVI: %3s ValEVI: %3s"%( model_time.get_cur_year(), annual_num_marr, annual_num_divo, annual_num_births, annual_num_deaths, annual_num_out_migr_LL_indiv, annual_num_ret_migr_LL_indiv, annual_num_out_migr_LD_indiv, annual_num_ret_migr_LD_indiv, annual_num_out_migr_HH, annual_num_in_migr_HH, mean_NBH_EVI, mean_Valley_EVI) logger.info('%s'%total_string) logger.info("Elapsed time: %11s"%elapsed_time(modelrun_starttime)) if rcParams['run_validation_checks']: if not test.validate_person_attributes(world): logger.critical("Person attributes validation failed") if not test.validate_household_attributes(world): logger.critical("Household attributes validation failed") if not test.validate_neighborhood_attributes(world): logger.critical("Neighborhood attributes validation failed") if num_persons == 0: logger.info("End of model run: population is zero") break if model_time.get_cur_month() == 12 or model_time.is_last_iteration(): write_results_CSV(world, results_path, model_time.get_cur_int_timestep()) model_time.increment() return saved_data, time_strings, results_new_format def elapsed_time(start_time): elapsed = int(time.time() - start_time) hours = int(elapsed / 3600) minutes = int((elapsed - hours * 3600) / 60) seconds = int(elapsed - hours * 3600 - minutes * 60) return "%ih %im %is" %(hours, minutes, seconds)
gpl-3.0
1,953,961,682,125,296,600
48.755263
178
0.613106
false
3.252537
true
false
false
djfkahn/MemberHubDirectoryTools
roster_tools.py
1
5813
#!/usr/bin/env python """This program inputs a MemberHub directory dump, and analyzes it. """ import family import roster import os from openpyxl import load_workbook MIN_NUM_ROSTER_FIELDS = 5 def ReadRosterAdultsFromMostRecent(file_name=None): """ roster_tools.ReadRosterAdultsFromMostRecent PURPOSE: Generates a list of adult names in the newest roster file. INPUT: - none OUTPUTS: - adults_list -- list of adult name fields in the newest roster file. ASSUMPTIONS: - none """ ## ## Find the files in the "Roster" folder with ".xlsx" extension, sort them by ## date, and pick the most recently added if not file_name: file_path = os.path.abspath("./Roster/") with os.scandir(file_path) as raw_files: files = [file for file in raw_files \ if not(file.name.startswith('~')) and (file.name.endswith('.xlsx'))] files.sort(key=lambda x: os.stat(x).st_mtime, reverse=True) file_name = file_path + "/" +files[0].name ## ## Load the workbook, and select the active/only worksheet wb = load_workbook(file_name) ws = wb.active ## ## Copy all the values in column 'D' for all rows beyond the title row into ## the output list adults_list = [] for fields in ws.iter_rows(min_row=2, max_row=ws.max_row, min_col=4, max_col=4): adults_list.append(fields[0].value) return adults_list def ReadRosterFromFile(file_name, hub_map, rosterC): """ roster_tools.ReadRosterFromFile PURPOSE: Reads a roster file with the following fields: <**Last Name>,<**First Name>,<**Grade>,<**Parent/Guardian Name(s)>,<***Teacher Name> ** - indicates always required field *** - indicates field that is required when Grade field is < 6 INPUT: - file_name -- name of the roster file - hub_map -- dictionary that maps hub names to hub IDs - rosterC -- the Roster object containing the errata OUTPUTS: - roster -- list of families extracted from the roster ASSUMPTIONS: 1. First row of the file is the column headers...not a member of the roster. 
""" wb = load_workbook(file_name) ws = wb.active student_count = -1 for fields in ws.values: ## Skip the first row if student_count < 0: student_count = 0 continue ## Skip any row for which all fields are not populated empty_field_found = False for i in range(MIN_NUM_ROSTER_FIELDS): if fields[i] == None or fields[i] == "": empty_field_found = True print("Found row with missing required fields:", fields) break if empty_field_found: continue ## each row represents one student student_count += 1 ## treat the student as a member of a new family...for now new_family = family.RosterFamily(adults_raw_name=fields[3]) new_family.AddToFamily(child_first = fields[1], child_last = fields[0], grade = fields[2], adult_names = fields[3], teacher_name = fields[4], hub_map = hub_map, rosterC = rosterC) # if new_family is the same as a family already in the roster, then combine # families. Otherwise, append new_family at the end of the roster. for roster_entry in rosterC.GetRoster(): if roster_entry.IsSameFamily(new_family): roster_entry.CombineWith(new_family) break else: rosterC.append(new_family) print("%d students processed %d families." % (student_count, len(rosterC))) return rosterC.GetRoster() def GetRosterFileName(): """ roster_tools.GetRosterFileName PURPOSE: Gives the user a list of possible roster files, and processes their selection. 
INPUTS: None OUTPUTS: - file_name - the selected roster file name ASSUMPTIONS: - Assumes the candidate roster files are stored in a subfolder called 'Roster' """ print ("These are the potential roster files:") file_path = os.path.abspath("./Roster/") with os.scandir(file_path) as raw_files: files = [file for file in raw_files \ if not(file.name.startswith('~')) and (file.name.endswith('.xlsx'))] files.sort(key=lambda x: os.stat(x).st_mtime, reverse=True) max_index = 0 file_number = '1' while int(file_number) >= max_index: for file in files: max_index += 1 print("%d) %s" % (max_index, file.name)) file_number = input("Enter list number of file or press <enter> to use '" + files[0].name + "':") if not file_number: return file_path + "/" +files[0].name elif 0 < int(file_number) <= max_index: return file_path + "/" + files[int(file_number)-1].name else: max_index = 0 print("The selection made is out of range. Please try again.") def ReadRoster(hub_map): """ roster_tools.ReadRoster PURPOSE: Prompts the user for roster file name and proceeds to read the file. INPUT: - hub_map -- mapping of teacher names to hub numbers OUTPUTS: - roster -- list of families extracted from the roster ASSUMPTIONS: - All the candidate rosters reside in a folder called "Roster" under the run directory. - All candidate rosters are Microsoft Excel files. """ return ReadRosterFromFile(GetRosterFileName(), hub_map, roster.Roster())
apache-2.0
4,514,965,062,765,273,000
35.559748
109
0.590745
false
3.829381
false
false
false
yusufm/mobly
mobly/controllers/android_device_lib/event_dispatcher.py
1
15487
#!/usr/bin/env python3.4 # # Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from concurrent.futures import ThreadPoolExecutor import queue import re import threading import time import traceback class EventDispatcherError(Exception): pass class IllegalStateError(EventDispatcherError): """Raise when user tries to put event_dispatcher into an illegal state. """ class DuplicateError(EventDispatcherError): """Raise when a duplicate is being created and it shouldn't. """ class EventDispatcher: """Class managing events for an sl4a connection. """ DEFAULT_TIMEOUT = 60 def __init__(self, sl4a): self._sl4a = sl4a self.started = False self.executor = None self.poller = None self.event_dict = {} self.handlers = {} self.lock = threading.RLock() def poll_events(self): """Continuously polls all types of events from sl4a. Events are sorted by name and store in separate queues. If there are registered handlers, the handlers will be called with corresponding event immediately upon event discovery, and the event won't be stored. 
If exceptions occur, stop the dispatcher and return """ while self.started: event_obj = None event_name = None try: event_obj = self._sl4a.eventWait(50000) except: if self.started: print("Exception happened during polling.") print(traceback.format_exc()) raise if not event_obj: continue elif 'name' not in event_obj: print("Received Malformed event {}".format(event_obj)) continue else: event_name = event_obj['name'] # if handler registered, process event if event_name in self.handlers: self.handle_subscribed_event(event_obj, event_name) if event_name == "EventDispatcherShutdown": self._sl4a.closeSl4aSession() break else: self.lock.acquire() if event_name in self.event_dict: # otherwise, cache event self.event_dict[event_name].put(event_obj) else: q = queue.Queue() q.put(event_obj) self.event_dict[event_name] = q self.lock.release() def register_handler(self, handler, event_name, args): """Registers an event handler. One type of event can only have one event handler associated with it. Args: handler: The event handler function to be registered. event_name: Name of the event the handler is for. args: User arguments to be passed to the handler when it's called. Raises: IllegalStateError: Raised if attempts to register a handler after the dispatcher starts running. DuplicateError: Raised if attempts to register more than one handler for one type of event. """ if self.started: raise IllegalStateError(("Can't register service after polling is" " started")) self.lock.acquire() try: if event_name in self.handlers: raise DuplicateError('A handler for {} already exists'.format( event_name)) self.handlers[event_name] = (handler, args) finally: self.lock.release() def start(self): """Starts the event dispatcher. Initiates executor and start polling events. Raises: IllegalStateError: Can't start a dispatcher again when it's already running. 
""" if not self.started: self.started = True self.executor = ThreadPoolExecutor(max_workers=32) self.poller = self.executor.submit(self.poll_events) else: raise IllegalStateError("Dispatcher is already started.") def clean_up(self): """Clean up and release resources after the event dispatcher polling loop has been broken. The following things happen: 1. Clear all events and flags. 2. Close the sl4a client the event_dispatcher object holds. 3. Shut down executor without waiting. """ if not self.started: return self.started = False self.clear_all_events() self._sl4a.close() self.poller.set_result("Done") # The polling thread is guaranteed to finish after a max of 60 seconds, # so we don't wait here. self.executor.shutdown(wait=False) def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT): """Pop an event from its queue. Return and remove the oldest entry of an event. Block until an event of specified name is available or times out if timeout is set. Args: event_name: Name of the event to be popped. timeout: Number of seconds to wait when event is not present. Never times out if None. Returns: event: The oldest entry of the specified event. None if timed out. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. """ if not self.started: raise IllegalStateError( "Dispatcher needs to be started before popping.") e_queue = self.get_event_q(event_name) if not e_queue: raise TypeError("Failed to get an event queue for {}".format( event_name)) try: # Block for timeout if timeout: return e_queue.get(True, timeout) # Non-blocking poll for event elif timeout == 0: return e_queue.get(False) else: # Block forever on event wait return e_queue.get(True) except queue.Empty: raise queue.Empty('Timeout after {}s waiting for event: {}'.format( timeout, event_name)) def wait_for_event(self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs): """Wait for an event that satisfies a predicate to appear. 
Continuously pop events of a particular name and check against the predicate until an event that satisfies the predicate is popped or timed out. Note this will remove all the events of the same name that do not satisfy the predicate in the process. Args: event_name: Name of the event to be popped. predicate: A function that takes an event and returns True if the predicate is satisfied, False otherwise. timeout: Number of seconds to wait. *args: Optional positional args passed to predicate(). **kwargs: Optional keyword args passed to predicate(). Returns: The event that satisfies the predicate. Raises: queue.Empty: Raised if no event that satisfies the predicate was found before time out. """ deadline = time.time() + timeout while True: event = None try: event = self.pop_event(event_name, 1) except queue.Empty: pass if event and predicate(event, *args, **kwargs): return event if time.time() > deadline: raise queue.Empty( 'Timeout after {}s waiting for event: {}'.format( timeout, event_name)) def pop_events(self, regex_pattern, timeout): """Pop events whose names match a regex pattern. If such event(s) exist, pop one event from each event queue that satisfies the condition. Otherwise, wait for an event that satisfies the condition to occur, with timeout. Results are sorted by timestamp in ascending order. Args: regex_pattern: The regular expression pattern that an event name should match in order to be popped. timeout: Number of seconds to wait for events in case no event matching the condition exits when the function is called. Returns: results: Pop events whose names match a regex pattern. Empty if none exist and the wait timed out. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. queue.Empty: Raised if no event was found before time out. 
""" if not self.started: raise IllegalStateError( "Dispatcher needs to be started before popping.") deadline = time.time() + timeout while True: #TODO: fix the sleep loop results = self._match_and_pop(regex_pattern) if len(results) != 0 or time.time() > deadline: break time.sleep(1) if len(results) == 0: raise queue.Empty('Timeout after {}s waiting for event: {}'.format( timeout, regex_pattern)) return sorted(results, key=lambda event: event['time']) def _match_and_pop(self, regex_pattern): """Pop one event from each of the event queues whose names match (in a sense of regular expression) regex_pattern. """ results = [] self.lock.acquire() for name in self.event_dict.keys(): if re.match(regex_pattern, name): q = self.event_dict[name] if q: try: results.append(q.get(False)) except: pass self.lock.release() return results def get_event_q(self, event_name): """Obtain the queue storing events of the specified name. If no event of this name has been polled, wait for one to. Returns: queue: A queue storing all the events of the specified name. None if timed out. timeout: Number of seconds to wait for the operation. Raises: queue.Empty: Raised if the queue does not exist and timeout has passed. """ self.lock.acquire() if not event_name in self.event_dict or self.event_dict[ event_name] is None: self.event_dict[event_name] = queue.Queue() self.lock.release() event_queue = self.event_dict[event_name] return event_queue def handle_subscribed_event(self, event_obj, event_name): """Execute the registered handler of an event. Retrieve the handler and its arguments, and execute the handler in a new thread. Args: event_obj: Json object of the event. event_name: Name of the event to call handler for. """ handler, args = self.handlers[event_name] self.executor.submit(handler, event_obj, *args) def _handle(self, event_handler, event_name, user_args, event_timeout, cond, cond_timeout): """Pop an event of specified type and calls its handler on it. 
If condition is not None, block until condition is met or timeout. """ if cond: cond.wait(cond_timeout) event = self.pop_event(event_name, event_timeout) return event_handler(event, *user_args) def handle_event(self, event_handler, event_name, user_args, event_timeout=None, cond=None, cond_timeout=None): """Handle events that don't have registered handlers In a new thread, poll one event of specified type from its queue and execute its handler. If no such event exists, the thread waits until one appears. Args: event_handler: Handler for the event, which should take at least one argument - the event json object. event_name: Name of the event to be handled. user_args: User arguments for the handler; to be passed in after the event json. event_timeout: Number of seconds to wait for the event to come. cond: A condition to wait on before executing the handler. Should be a threading.Event object. cond_timeout: Number of seconds to wait before the condition times out. Never times out if None. Returns: worker: A concurrent.Future object associated with the handler. If blocking call worker.result() is triggered, the handler needs to return something to unblock. """ worker = self.executor.submit(self._handle, event_handler, event_name, user_args, event_timeout, cond, cond_timeout) return worker def pop_all(self, event_name): """Return and remove all stored events of a specified name. Pops all events from their queue. May miss the latest ones. If no event is available, return immediately. Args: event_name: Name of the events to be popped. Returns: results: List of the desired events. Raises: IllegalStateError: Raised if pop is called before the dispatcher starts polling. 
""" if not self.started: raise IllegalStateError(("Dispatcher needs to be started before " "popping.")) results = [] try: self.lock.acquire() while True: e = self.event_dict[event_name].get(block=False) results.append(e) except (queue.Empty, KeyError): return results finally: self.lock.release() def clear_events(self, event_name): """Clear all events of a particular name. Args: event_name: Name of the events to be popped. """ self.lock.acquire() try: q = self.get_event_q(event_name) q.queue.clear() except queue.Empty: return finally: self.lock.release() def clear_all_events(self): """Clear all event queues and their cached events.""" self.lock.acquire() self.event_dict.clear() self.lock.release()
apache-2.0
-2,744,429,708,353,299,500
34.766744
79
0.572093
false
4.826114
false
false
false
cavestruz/L500analysis
plotting/profiles/T_Vcirc_evolution/Vcirc_evolution/plot_Vcirc2_nu_binned_Vc500c.py
1
3175
from L500analysis.data_io.get_cluster_data import GetClusterData from L500analysis.utils.utils import aexp2redshift from L500analysis.plotting.tools.figure_formatting import * from L500analysis.plotting.profiles.tools.profiles_percentile \ import * from L500analysis.plotting.profiles.tools.select_profiles \ import nu_cut, prune_dict from L500analysis.utils.constants import rbins from derived_field_functions import * color = matplotlib.cm.afmhot_r aexps = [1.0,0.9,0.8,0.7,0.6,0.5,0.45,0.4,0.35] nu_threshold = [2.3,2.7] nu_label = r"%0.1f$\leq\nu_{500c}\leq$%0.1f"%(nu_threshold[0],nu_threshold[1]) db_name = 'L500_NR_0' db_dir = '/home/babyostrich/Documents/Repos/L500analysis/' profiles_list = ['r_mid', 'Vcirc2_Vc500c', 'M_dark', 'M_star', 'M_gas', 'R/R500c'] halo_properties_list=['r500c','M_total_500c','nu_500c'] Vcirc2ratioVc500c=r"$\tilde{V}=V^2_{c}/V^2_{c,500c}$" fVcz1=r"$\tilde{V}/\tilde{V}(z=1)$" pa = PlotAxes(figname='Vcirc2_Vc500c_nu%0.1f'%nu_threshold[0], axes=[[0.15,0.4,0.80,0.55],[0.15,0.15,0.80,0.24]], axes_labels=[Vcirc2ratioVc500c,fVcz1], xlabel=r"$R/R_{500c}$", xlim=(0.2,5), ylims=[(0.6,1.4),(0.6,1.4)]) Vcirc2={} clkeys = ['Vcirc2_Vc500c'] plots = [Vcirc2] linestyles = ['-'] for aexp in aexps : cldata = GetClusterData(aexp=aexp,db_name=db_name, db_dir=db_dir, profiles_list=profiles_list, halo_properties_list=halo_properties_list) nu_cut_hids = nu_cut(nu=cldata['nu_500c'], threshold=nu_threshold) for plot, key in zip(plots,clkeys) : pruned_profiles = prune_dict(d=cldata[key],k=nu_cut_hids) plot[aexp] = calculate_profiles_mean_variance(pruned_profiles) pa.axes[Vcirc2ratioVc500c].plot( rbins, Vcirc2[aexp]['mean'],color=color(aexp), ls='-',label="$z=%3.1f$" % aexp2redshift(aexp)) pa.axes[Vcirc2ratioVc500c].fill_between(rbins, Vcirc2[0.5]['down'], Vcirc2[0.5]['up'], color=color(0.5), zorder=0) for aexp in aexps : for V,ls in zip(plots,linestyles) : fractional_evolution = get_profiles_division_mean_variance( mean_profile1=V[aexp]['mean'], 
var_profile1=V[aexp]['var'], mean_profile2=V[0.5]['mean'], var_profile2=V[0.5]['var'], ) pa.axes[fVcz1].plot( rbins, fractional_evolution['mean'], color=color(aexp),ls=ls) pa.axes[Vcirc2ratioVc500c].annotate(nu_label, xy=(.75, .75), xytext=(.3, 1.3)) pa.axes[Vcirc2ratioVc500c].tick_params(labelsize=12) pa.axes[Vcirc2ratioVc500c].tick_params(labelsize=12) pa.axes[fVcz1].set_yticks(arange(0.6,1.4,0.2)) matplotlib.rcParams['legend.handlelength'] = 0 matplotlib.rcParams['legend.numpoints'] = 1 matplotlib.rcParams['legend.fontsize'] = 12 pa.set_legend(axes_label=Vcirc2ratioVc500c,ncol=3,loc='upper right', frameon=False) pa.color_legend_texts(axes_label=Vcirc2ratioVc500c) pa.savefig()
mit
385,859,252,746,646,600
36.352941
87
0.610394
false
2.702128
false
false
false
Videoclases/videoclases
quality_control/views/api.py
1
4172
import random from django.contrib.auth.decorators import user_passes_test from django.contrib import messages from django.core.urlresolvers import reverse from django.db.models.aggregates import Count from django.http.response import JsonResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator from django.views.generic.base import TemplateView from django.views.generic.detail import DetailView from django.core import serializers from quality_control.models.quality_control import QualityControl from videoclases.models.groupofstudents import GroupOfStudents from videoclases.models.homework import Homework from videoclases.models.student_evaluations import StudentEvaluations def in_students_group(user): if user: return user.groups.filter(name='Alumnos').exists() return False class GetVideoClaseView(DetailView): template_name = 'blank.html' model = Homework def get(self, request, *args, **kwargs): result = dict() homework_base = self.get_object() homework = homework_base groups = GroupOfStudents.objects.filter(homework=homework) student = self.request.user.student if homework_base.homework_to_evaluate is not None: homework = homework_base.homework_to_evaluate groups = GroupOfStudents.objects.filter(homework=homework) else: group_student = get_object_or_404(GroupOfStudents, students=student, homework=homework) groups = groups.exclude(id=group_student.id) groups = groups \ .exclude(videoclase__video__isnull=True) \ .exclude(videoclase__video__exact='') \ .exclude(videoclase__answers__student=student) \ .annotate(revision=Count('videoclase__answers')) \ .order_by('revision', '?') element_response = groups[0] if groups.exists() else None control = QualityControl.objects.filter(homework=homework) control = control[0] if control.exists() else None if control: evaluated_items = control.list_items.filter(videoclase__answers__student=student) # limit max evaluation of quality item to 5 if 
evaluated_items.count() < 3: items = control.list_items.all() \ .exclude(videoclase__answers__student=student) item_to_evaluate = items[random.randint(0, items.count()-1)] if items.exists() else None if item_to_evaluate and element_response: value_random = random.random() # TODO: need to be a more smart filter element_response = item_to_evaluate if value_random > 0.55 else element_response elif item_to_evaluate: element_response = item_to_evaluate if element_response: alternativas = [element_response.videoclase.correct_alternative, element_response.videoclase.alternative_2, element_response.videoclase.alternative_3] random.shuffle(alternativas) result['video'] = element_response.videoclase.video result['question'] = element_response.videoclase.question result['videoclase_id'] = element_response.videoclase.pk result['alternativas'] = alternativas result['redirect'] = False else: result['redirect'] = True return JsonResponse(result) def get_context_data(self, **kwargs): context = super(GetVideoClaseView, self).get_context_data(**kwargs) return context @method_decorator(user_passes_test(in_students_group, login_url='/')) def dispatch(self, *args, **kwargs): obj = self.get_object() hw = Homework.objects.filter(id=obj.id,course__students=self.request.user.student) if hw.count() == 0: messages.info(self.request, 'No tienes permisos para evaluar esta tarea.') return HttpResponseRedirect(reverse('student')) return super(GetVideoClaseView, self).dispatch(*args, **kwargs)
gpl-3.0
-7,057,957,377,848,098,000
45.355556
104
0.661793
false
4.09421
false
false
false
JGrishey/MHLSim
pylib/simulation.py
1
19119
''' Season Simulation 2017 Jacob Grishey For the purpose of simulating sports seasons and determining regular season standings. ''' # IMPORTS import json import statistics import numpy from operator import itemgetter import copy # Read JSON file (schedule, team list) with open("./../data/season.json") as jsonfile: data = json.load(jsonfile) # Parse JSON file teams = {team: {'w': 0, 'l': 0, 'otl': 0, 'elo': 1500} for team in data['teams']} schedule = data['schedule'] # Results results = [{'name': team, 'seed1': 0, 'seed2': 0, 'seed3': 0, 'seed4': 0, 'seed5': 0, 'seed6': 0, 'seed7': 0, 'seed8': 0, 'aw': 0, 'al': 0, 'aotl': 0, 'r2': 0, 'r3': 0, 'cup': 0} for team in teams] # Divisions brent = ["Cape Cod Bluefins", "Trevor Phillips Industries", "Inglorious Basterds", "Crack Smoking Monkeys", "Moose Knuckles", "Hood Rich"] drew = ["Small Sample Size", "The Bearded Bandits", "Row Row Fight the Powah", "Motor City Machine Guns", "Suck Our Dekes", "Tenacious V"] # Separate past from future games past = [game for game in schedule if game['h-score'] != -1] future = [game for game in schedule if game['h-score'] == -1] # Update teams with past results for game in past: if game['h-score'] > game['a-score']: if game['ot'] == 1: teams[game['home']]['w'] += 1 teams[game['away']]['otl'] += 1 else: teams[game['home']]['w'] += 1 teams[game['away']]['l'] += 1 else: if game['ot'] == 1: teams[game['away']]['w'] += 1 teams[game['home']]['otl'] += 1 else: teams[game['away']]['w'] += 1 teams[game['home']]['l'] += 1 # Expected Score function # # Given elo of team A and team B, calculate expected score of team A. def expectedScoreA (eloA, eloB): return 1 / (1 + 10 ** ((eloB - eloA) / 400)) # New Rating Function # # Given Elo, actual score, expected score, and goal differential and calculate the team's new Elo rating. 
def newRating (eloA, eloB, scoreActual, scoreExpected, goalDifferential, win): # K-Factor if eloA < 2100: K = 32 elif eloA <= 2400: K = 24 else: K = 16 # Calculate for goal differential and autocorrelation marginMult = numpy.log(goalDifferential + 1) * (2.2 / (abs(eloA - eloB) * 0.01 + 2.2)) # Return new rating return eloA + (marginMult * K) * (scoreActual - scoreExpected) # Update elo from past games for game in past: # Current Elo ratings currentEloA = teams[game['home']]['elo'] currentEloB = teams[game['away']]['elo'] # Get Expected Scores eA = expectedScoreA(currentEloA, currentEloB) eB = 1 - eA # Get scores homeGoals = game['h-score'] awayGoals = game['a-score'] goalDifferential = abs(homeGoals - awayGoals) # Get Actual Scores if homeGoals > awayGoals: if game['ot'] == 1: sA = 1.0 sB = 0.5 winA = True winB = False else: sA = 1.0 sB = 0.0 winA = False winB = True else: if game['ot'] == 1: sB = 1.0 sA = 0.5 winA = True winB = False else: sB = 1.0 sA = 0.0 winA = False winB = True # Calculate new Elo ratings newA = newRating(currentEloA, currentEloB, sA, eA, goalDifferential, winA) newB = newRating(currentEloB, currentEloA, sB, eB, goalDifferential, winB) # Apply Elo ratings teams[game['home']]['elo'] = newA teams[game['away']]['elo'] = newB # Simulation def runSeason (tempTeams): for game in future: # Current Elo ratings currentEloA = tempTeams[game['home']]['elo'] currentEloB = tempTeams[game['away']]['elo'] # Get Expected Scores eA = expectedScoreA(currentEloA, currentEloB) eB = 1 - eA # Random number between 0 and 1 to decide who wins. decideWin = numpy.random.random() # Random number between 0 and 1 to decide if it goes into Overtime. 
decideOT = numpy.random.random() # Actual Predicted Scores if decideOT <= 0.233: if decideWin <= eA: sA = 1.0 tempTeams[game['home']]['w'] += 1 sB = 0.5 tempTeams[game['away']]['otl'] += 1 else: sA = 0.5 tempTeams[game['home']]['otl'] += 1 sB = 1.0 tempTeams[game['away']]['w'] += 1 else: if decideWin <= eA: sA = 1.0 tempTeams[game['home']]['w'] += 1 sB = 0.0 tempTeams[game['away']]['l'] += 1 else: sA = 0.0 tempTeams[game['home']]['l'] += 1 sB = 1.0 tempTeams[game['away']]['w'] += 1 # Calculate new Elo ratings #newA = newRating(currentEloA, sA, eA) #newB = newRating(currentEloB, sB, eB) # Apply new Elo ratings #tempTeams[game['home']]['elo'] = newA #tempTeams[game['away']]['elo'] = newB # End of Season standings brentStandings = [] drewStandings = [] # Collect teams, calculate points. for team in tempTeams: if team in brent: brentStandings.append({"name": team, "pts": tempTeams[team]['w'] * 2 + tempTeams[team]['otl']}) next(item for item in results if item["name"] == team)['aw'] += tempTeams[team]['w'] next(item for item in results if item["name"] == team)['al'] += tempTeams[team]['l'] next(item for item in results if item["name"] == team)['aotl'] += tempTeams[team]['otl'] else: drewStandings.append({"name": team, "pts": tempTeams[team]['w'] * 2 + tempTeams[team]['otl']}) next(item for item in results if item["name"] == team)['aw'] += tempTeams[team]['w'] next(item for item in results if item["name"] == team)['al'] += tempTeams[team]['l'] next(item for item in results if item["name"] == team)['aotl'] += tempTeams[team]['otl'] # Sort by points brentStandings = sorted(brentStandings, key=itemgetter('pts'), reverse=True) drewStandings = sorted(drewStandings, key=itemgetter('pts'), reverse=True) # Cut off top 2, then concat and sort by points overall8 = sorted(brentStandings[2:] + drewStandings[2:], key=itemgetter('pts'), reverse=True) # Playoff Seeding playoffs = [{"seed": seed, "name": ""} for seed in range(1,9)] # Get playoff teams playoffTeams = 
sorted(brentStandings[:2] + drewStandings[:2] + overall8[:4], key=itemgetter('pts'), reverse=True) # Add Results next(item for item in results if item["name"] == playoffTeams[0]['name'])['seed1'] += 1 next(item for item in results if item["name"] == playoffTeams[1]['name'])['seed2'] += 1 next(item for item in results if item["name"] == playoffTeams[2]['name'])['seed3'] += 1 next(item for item in results if item["name"] == playoffTeams[3]['name'])['seed4'] += 1 next(item for item in results if item["name"] == playoffTeams[4]['name'])['seed5'] += 1 next(item for item in results if item["name"] == playoffTeams[5]['name'])['seed6'] += 1 next(item for item in results if item["name"] == playoffTeams[6]['name'])['seed7'] += 1 next(item for item in results if item["name"] == playoffTeams[7]['name'])['seed8'] += 1 # Insert into seeds for (team, i) in zip(playoffTeams, range(0, 8)): playoffs[i]['name'] = team['name'] # Schedule first round games firstRoundGames = [] firstRoundSeries = [] for i in range(0, 4): firstRoundSeries.append({ 'home': playoffs[i]['name'], 'away': playoffs[7-i]['name'], 'h-seed': playoffs[i]['seed'], 'a-seed': playoffs[7-i]['seed'], 'h-wins': 0, 'a-wins': 0 }) for k in range(0, 4): firstRoundGames.append({ 'home': playoffs[i]['name'], 'away': playoffs[7-i]['name'] }) # Simulate first round for game in firstRoundGames: # Current Elo ratings of both teams homeElo = tempTeams[game['home']]['elo'] awayElo = tempTeams[game['away']]['elo'] # Win probabilities eA = expectedScoreA(homeElo, awayElo) eB = 1 - eA # Decide win and OT decideWin = numpy.random.random() decideOT = numpy.random.random() # Get series data series = next(item for item in firstRoundSeries if item['home'] == game['home']) # For scheduling purposes previousLow = min([series['h-wins'], series['a-wins']]) # Simulate game if decideOT <= 0.233: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: firstRoundGames.append({ 'home': game['home'], 
'away': game['away'] }) sA = 1.0 sB = 0.5 else: series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: firstRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.5 sB = 1.0 else: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: firstRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 1.0 sB = 0.0 else: series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: firstRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.0 sB = 1.0 # Calculate new Elo ratings #newA = newRating(homeElo, sA, eA) #newB = newRating(awayElo, sB, eB) # Apply new Elo ratings #tempTeams[game['home']]['elo'] = newA #tempTeams[game['away']]['elo'] = newB # Collect series winners. secondRoundTeams = [] for series in firstRoundSeries: if series['h-wins'] == 4: secondRoundTeams.append({'seed': series['h-seed'], 'name': series['home']}) next(item for item in results if item['name'] == series['home'])['r2'] += 1 else: secondRoundTeams.append({'seed': series['a-seed'], 'name': series['away']}) next(item for item in results if item['name'] == series['away'])['r2'] += 1 secondRoundTeams = sorted(secondRoundTeams, key=itemgetter('seed')) # Schedule second round games secondRoundGames = [] secondRoundSeries = [] for i in range(0, 2): secondRoundSeries.append({ 'home': secondRoundTeams[i]['name'], 'away': secondRoundTeams[3-i]['name'], 'h-seed': secondRoundTeams[i]['seed'], 'a-seed': secondRoundTeams[3-i]['seed'], 'h-wins': 0, 'a-wins': 0 }) for k in range(0, 4): secondRoundGames.append({ 'home': secondRoundTeams[i]['name'], 'away': secondRoundTeams[3-i]['name'] }) # Simulate second round for game in secondRoundGames: # Current Elo ratings of both teams homeElo = tempTeams[game['home']]['elo'] awayElo = tempTeams[game['away']]['elo'] # Win probabilities eA = expectedScoreA(homeElo, awayElo) eB = 1 - eA # Decide win and OT decideWin = 
numpy.random.random() decideOT = numpy.random.random() # Get series data series = next(item for item in secondRoundSeries if item['home'] == game['home']) # For scheduling purposes previousLow = min([series['h-wins'], series['a-wins']]) # Simulate game if decideOT <= 0.233: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: secondRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 1.0 sB = 0.5 else: series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: secondRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.5 sB = 1.0 else: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: secondRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 1.0 sB = 0.0 else: series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: secondRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.0 sB = 1.0 # Calculate new Elo ratings #newA = newRating(homeElo, sA, eA) #newB = newRating(awayElo, sB, eB) # Apply new Elo ratings #tempTeams[game['home']]['elo'] = newA #tempTeams[game['away']]['elo'] = newB # Collect series winners. 
thirdRoundTeams = [] for series in secondRoundSeries: if series['h-wins'] == 4: thirdRoundTeams.append({'seed': series['h-seed'], 'name': series['home']}) next(item for item in results if item['name'] == series['home'])['r3'] += 1 else: thirdRoundTeams.append({'seed': series['a-seed'], 'name': series['away']}) next(item for item in results if item['name'] == series['away'])['r3'] += 1 thirdRoundTeams = sorted(thirdRoundTeams, key=itemgetter('seed')) # Schedule second round games thirdRoundGames = [] thirdRoundSeries = [] for i in range(0, 1): thirdRoundSeries.append({ 'home': thirdRoundTeams[i]['name'], 'away': thirdRoundTeams[1-i]['name'], 'h-seed': thirdRoundTeams[i]['seed'], 'a-seed': thirdRoundTeams[1-i]['seed'], 'h-wins': 0, 'a-wins': 0 }) for k in range(0, 4): thirdRoundGames.append({ 'home': thirdRoundTeams[i]['name'], 'away': thirdRoundTeams[1-i]['name'] }) # Simulate third round for game in thirdRoundGames: # Current Elo ratings of both teams homeElo = tempTeams[game['home']]['elo'] awayElo = tempTeams[game['away']]['elo'] # Win probabilities eA = expectedScoreA(homeElo, awayElo) eB = 1 - eA # Decide win and OT decideWin = numpy.random.random() decideOT = numpy.random.random() # Get series data series = next(item for item in thirdRoundSeries if item['home'] == game['home']) # For scheduling purposes previousLow = min([series['h-wins'], series['a-wins']]) # Simulate game if decideOT <= 0.233: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: thirdRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 1.0 sB = 0.5 else: series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: thirdRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.5 sB = 1.0 else: if decideWin <= eA: series['h-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: thirdRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 1.0 sB = 0.0 else: 
series['a-wins'] += 1 if min([series['h-wins'], series['a-wins']]) > previousLow: thirdRoundGames.append({ 'home': game['home'], 'away': game['away'] }) sA = 0.0 sB = 1.0 # Calculate new Elo ratings #newA = newRating(homeElo, sA, eA) #newB = newRating(awayElo, sB, eB) # Apply new Elo ratings #tempTeams[game['home']]['elo'] = newA #tempTeams[game['away']]['elo'] = newB # Collect series winners. cupWinner = [] for series in thirdRoundSeries: if series['h-wins'] == 4: cupWinner.append({'seed': series['h-seed'], 'name': series['home']}) next(item for item in results if item['name'] == series['home'])['cup'] += 1 else: cupWinner.append({'seed': series['a-seed'], 'name': series['away']}) next(item for item in results if item['name'] == series['away'])['cup'] += 1 # Run simulation 100,000 times. for i in range(0, 100000): runSeason(copy.deepcopy(teams)) # Calculate average season. for team in results: team['aw'] /= 100000 team['al'] /= 100000 team['aotl'] /= 100000 # Add division info to each team. for team in teams: if team in brent: teams[team]['division'] = "Brent" else: teams[team]['division'] = "Drew" next(item for item in results if item["name"] == team)['w'] = teams[team]['w'] next(item for item in results if item["name"] == team)['l'] = teams[team]['l'] next(item for item in results if item["name"] == team)['otl'] = teams[team]['otl'] next(item for item in results if item["name"] == team)['elo'] = teams[team]['elo'] next(item for item in results if item["name"] == team)['division'] = teams[team]['division'] # Write results to outfile. with open('./../data/results.json', 'w') as outfile: json.dump(results, outfile, indent=4)
mit
-6,972,750,639,965,787,000
33.021352
107
0.500078
false
3.601243
false
false
false
PuZheng/lejian-backend
lejian/apis/model_wrapper.py
1
3211
# -*- coding: UTF-8 -*- import types import inspect import traceback class _MyAttributeError(Exception): pass def convert_attribute_error(f): def f_(*args, **kwargs): try: return f(*args, **kwargs) except AttributeError, e: print "~" * 78 traceback.print_exc() print "~" * 78 raise _MyAttributeError(e) return f_ class _FGet(object): def __init__(self, attr): self.attr = attr def __call__(self, wrapper): return wraps(convert_attribute_error(self.attr.fget)(wrapper)) def wraps(obj): if isinstance(obj, types.ListType) or isinstance(obj, types.TupleType): return obj.__class__(wraps(obj_) for obj_ in obj) if hasattr(obj.__class__, '_sa_class_manager'): try: return _wrappers[obj.__class__.__name__ + "Wrapper"](obj) except KeyError: return obj return obj def unwraps(obj): if isinstance(obj, types.ListType) or isinstance(obj, types.TupleType): return obj.__class__(unwraps(obj_) for obj_ in obj) if isinstance(obj, ModelWrapper): return obj.obj return obj _wrappers = {} class ModelWrapper(object): class __metaclass__(type): def __init__(cls, name, bases, nmspc): type.__init__(cls, name, bases, nmspc) # register wrappers _wrappers[cls.__name__] = cls # decorate wrapper's method: # # * convert result object(s) to wrapper(s) # * convert attribute error, otherwise the underlying object # will be searched, and finally make bizzare result for name, attr in cls.__dict__.items(): if isinstance(attr, property) and name not in {'obj'}: setattr(cls, name, property(fget=_FGet(attr), fset=attr.fset, fdel=attr.fdel)) elif inspect.ismethod(attr) and attr not in {'__getattr__', '__setattr__', '__unicode__'}: old = convert_attribute_error(getattr(cls, name)) setattr(cls, name, lambda wrapper, *args, **kwargs: wraps(old(wrapper, *args, **kwargs))) def __init__(self, obj): self.__obj = obj @property def obj(self): return self.__obj def __getattr__(self, name): attr = getattr(self.__obj, name) if isinstance(attr, types.ListType) or isinstance(attr, types.TupleType): return type(attr)(wraps(i) 
for i in attr) return wraps(attr) def __setattr__(self, key, value): # TODO when only key is defined in wrapped object if key != '_ModelWrapper__obj': self.__obj.__setattr__(key, value) else: self.__dict__[key] = value def __unicode__(self): return unicode(self.__obj) def __dir__(self): return self.__obj.__dict__.keys()
mit
353,604,400,444,146,900
29.875
76
0.507319
false
4.416781
false
false
false
flyingbanana1024102/transmission-line-simulator
src/views/materialwidget.py
1
2230
# # Transmission Line Simulator # # Author(s): Jiacong Xu # Created: Jun-28-2017 # from kivy.uix.widget import Widget from kivy.properties import * from kivy.clock import Clock from kivy.graphics.texture import Texture from kivy.graphics import * from PIL import Image, ImageDraw, ImageFilter class MaterialWidget(Widget): """ The basic UI element layout, automatically draws and updates its shadows. raised: whether this widget has an edge and shadow. """ keyShadowTexture = ObjectProperty(None) ambientShadowTexture = ObjectProperty(None) raised = BooleanProperty(True) clipSubviews = BooleanProperty(False) elevation = NumericProperty(2.0) backgroundColor = ListProperty([1, 1, 1, 1]) def __init__(self, **kwargs): super(MaterialWidget, self).__init__(**kwargs) def on_size(self, *args, **kwargs): self._updateShadow() def on_pos(self, *args, **kwargs): self._updateShadow() def on_elevation(self, *args, **kwargs): self._updateShadow() def _updateShadow(self): # Shadow 1 offset_y = self.elevation radius = self.elevation / 2.0 t1 = self._genShadow(self.size[0], self.size[1], radius, 0.26) self.keyShadowTexture = t1 # Shadow 2 radius = self.elevation t2 = self._genShadow(self.size[0], self.size[1], radius, 0.05) self.ambientShadowTexture = t2 def _genShadow(self, ow, oh, radius, alpha): # We need a bigger texture to correctly blur the edges w = ow + radius * 6.0 h = oh + radius * 6.0 w = int(w) h = int(h) texture = Texture.create(size=(w, h), colorfmt='rgba') im = Image.new('RGBA', (w, h), color=(1, 1, 1, 0)) draw = ImageDraw.Draw(im) # the rectangle to be rendered needs to be centered on the texture x0, y0 = (w - ow) / 2., (h - oh) / 2. x1, y1 = x0 + ow - 1, y0 + oh - 1 draw.rectangle((x0, y0, x1, y1), fill=(0, 0, 0, int(255 * alpha))) im = im.filter(ImageFilter.GaussianBlur(radius)) texture.blit_buffer(im.tobytes(), colorfmt='rgba', bufferfmt='ubyte') return texture
mit
-3,297,739,092,324,212,700
26.530864
77
0.604933
false
3.484375
false
false
false
davy39/eric
Plugins/VcsPlugins/vcsMercurial/HgImportDialog.py
1
3032
# -*- coding: utf-8 -*- # Copyright (c) 2011 - 2014 Detlev Offenbach <[email protected]> # """ Module implementing a dialog to enter data for the Mercurial import command. """ from __future__ import unicode_literals from PyQt5.QtCore import pyqtSlot, QDateTime from PyQt5.QtWidgets import QDialog, QDialogButtonBox from E5Gui import E5FileDialog from E5Gui.E5Completers import E5FileCompleter from .Ui_HgImportDialog import Ui_HgImportDialog import Utilities import UI.PixmapCache class HgImportDialog(QDialog, Ui_HgImportDialog): """ Class implementing a dialog to enter data for the Mercurial import command. """ def __init__(self, parent=None): """ Constructor @param parent reference to the parent widget (QWidget) """ super(HgImportDialog, self).__init__(parent) self.setupUi(self) self.patchFileButton.setIcon(UI.PixmapCache.getIcon("open.png")) self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False) self.__patchFileCompleter = E5FileCompleter(self.patchFileEdit) self.__initDateTime = QDateTime.currentDateTime() self.dateEdit.setDateTime(self.__initDateTime) def __updateOK(self): """ Private slot to update the OK button. """ enabled = True if self.patchFileEdit.text() == "": enabled = False self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(enabled) @pyqtSlot(str) def on_patchFileEdit_textChanged(self, txt): """ Private slot to react on changes of the patch file edit. @param txt contents of the line edit (string) """ self.__updateOK() @pyqtSlot() def on_patchFileButton_clicked(self): """ Private slot called by pressing the file selection button. """ fn = E5FileDialog.getOpenFileName( self, self.tr("Select patch file"), self.patchFileEdit.text(), self.tr("Patch Files (*.diff *.patch);;All Files (*)")) if fn: self.patchFileEdit.setText(Utilities.toNativeSeparators(fn)) def getParameters(self): """ Public method to retrieve the import data. 
@return tuple naming the patch file, a flag indicating to not commit, a commit message, a commit date, a commit user, a strip count and a flag indicating to enforce the import (string, boolean, string, string, string, integer, boolean) """ if self.dateEdit.dateTime() != self.__initDateTime: date = self.dateEdit.dateTime().toString("yyyy-MM-dd hh:mm") else: date = "" return (self.patchFileEdit.text(), self.noCommitCheckBox.isChecked(), self.messageEdit.toPlainText(), date, self.userEdit.text(), self.stripSpinBox.value(), self.forceCheckBox.isChecked())
gpl-3.0
-8,518,808,736,762,301,000
30.915789
79
0.616755
false
4.211111
false
false
false
dpmehta02/linkedin-scrapy
linkedin/spiders/linkedin_spider.py
1
3589
from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import HtmlXPathSelector from scrapy.http import Request from linkedin.items import LinkedinItem class LinkedinSpider(CrawlSpider): """ Define the crawler's start URIs, set its follow rules, parse HTML and assign values to an item. Processing occurs in ../pipelines.py """ name = "linkedin" allowed_domains = ["linkedin.com"] # Uncomment the following lines for full spidering ''' centilist_one = (i for i in xrange(1,100)) centilist_two = (i for i in xrange(1,100)) centilist_three = (i for i in xrange(1,100)) start_urls = ["http://www.linkedin.com/directory/people-%s-%d-%d-%d" % (alphanum, num_one, num_two, num_three) for alphanum in "abcdefghijklmnopqrstuvwxyz" for num_one in centilist_one for num_two in centilist_two for num_three in centilist_three ] ''' # Temporary start_urls for testing; remove and use the above start_urls in production start_urls = ["http://www.linkedin.com/directory/people-a-23-23-2"] # TODO: allow /in/name urls too? rules = (Rule(SgmlLinkExtractor(allow=('\/pub\/.+')), callback='parse_item')) def parse_item(self, response): if response: hxs = HtmlXPathSelector(response) item = LinkedinItem() # TODO: is this the best way to check that we're scraping the right page? item['full_name'] = hxs.select('//*[@id="name"]/span/span/text()').extract() if not item['full_name']: # recursively parse list of duplicate profiles # NOTE: Results page only displays 25 of possibly many more names; # LinkedIn requests authentication to see the rest. 
Need to resolve # TODO: add error checking here to ensure I'm getting the right links # and links from "next>>" pages multi_profile_urls = hxs.select('//*[@id="result-set"]/li/h2/strong/ \ a/@href').extract() for profile_url in multi_profile_urls: yield Request(profile_url, callback=self.parse_item) else: item['first_name'], item['last_name'], item['full_name'], item['headline_title'], item['locality'], item['industry'], item['current_roles'] = item['full_name'][0], item['full_name'][1], hxs.select('//*[@id="name"]/span/span/text()').extract(), hxs.select('//*[@id="member-1"]/p/text()').extract(), hxs.select('//*[@id="headline"]/dd[1]/span/text()').extract(), hxs.select('//*[@id="headline"]/dd[2]/text()').extract(), hxs.select('//*[@id="overview"]/dd[1]/ul/li/text()').extract() # TODO: add metadata fields if hxs.select('//*[@id="overview"]/dt[2]/text()').extract() == [u' \n Education\n ']: item['education_institutions'] = hxs.select('//*[@id="overview"]/dd[2]/ul/li/text()').extract() print item else: print "Uh oh, no response." return
mit
-8,053,537,752,368,067,000
46.223684
115
0.528281
false
4.106407
false
false
false
blaze/dask
dask/dataframe/hyperloglog.py
3
2433
"""Implementation of HyperLogLog This implements the HyperLogLog algorithm for cardinality estimation, found in Philippe Flajolet, Éric Fusy, Olivier Gandouet and Frédéric Meunier. "HyperLogLog: the analysis of a near-optimal cardinality estimation algorithm". 2007 Conference on Analysis of Algorithms. Nice, France (2007) """ import numpy as np import pandas as pd from pandas.util import hash_pandas_object def compute_first_bit(a): "Compute the position of the first nonzero bit for each int in an array." # TODO: consider making this less memory-hungry bits = np.bitwise_and.outer(a, 1 << np.arange(32)) bits = bits.cumsum(axis=1).astype(bool) return 33 - bits.sum(axis=1) def compute_hll_array(obj, b): # b is the number of bits if not 8 <= b <= 16: raise ValueError("b should be between 8 and 16") num_bits_discarded = 32 - b m = 1 << b # Get an array of the hashes hashes = hash_pandas_object(obj, index=False) if isinstance(hashes, pd.Series): hashes = hashes._values hashes = hashes.astype(np.uint32) # Of the first b bits, which is the first nonzero? 
j = hashes >> num_bits_discarded first_bit = compute_first_bit(hashes) # Pandas can do the max aggregation df = pd.DataFrame({"j": j, "first_bit": first_bit}) series = df.groupby("j").max()["first_bit"] # Return a dense array so we can concat them and get a result # that is easy to deal with return series.reindex(np.arange(m), fill_value=0).values.astype(np.uint8) def reduce_state(Ms, b): m = 1 << b # We concatenated all of the states, now we need to get the max # value for each j in both Ms = Ms.reshape((len(Ms) // m), m) return Ms.max(axis=0) def estimate_count(Ms, b): m = 1 << b # Combine one last time M = reduce_state(Ms, b) # Estimate cardinality, no adjustments alpha = 0.7213 / (1 + 1.079 / m) E = alpha * m / (2.0 ** -(M.astype("f8"))).sum() * m # ^^^^ starts as unsigned, need a signed type for # negation operator to do something useful # Apply adjustments for small / big cardinalities, if applicable if E < 2.5 * m: V = (M == 0).sum() if V: return m * np.log(m / V) if E > 2 ** 32 / 30.0: return -(2 ** 32) * np.log1p(-E / 2 ** 32) return E
bsd-3-clause
-5,806,826,853,938,484,000
29.375
77
0.615638
false
3.375
false
false
false
ros/catkin
cmake/test/download_checkmd5.py
1
5773
from __future__ import print_function import errno import hashlib import os import sys try: from urllib.request import addinfourl, BaseHandler, build_opener, Request, URLError except ImportError: from urllib2 import addinfourl, BaseHandler, build_opener, Request, URLError from argparse import ArgumentParser NAME = 'download_checkmd5.py' class HTTPRangeHandler(BaseHandler): def http_error_206(self, req, fp, code, msg, hdrs): r = addinfourl(fp, hdrs, req.get_full_url()) r.code = code r.msg = msg return r def http_error_416(self, req, fp, code, msg, hdrs): raise URLError('Requested Range Not Satisfiable') def download_with_resume(uri, dest): handler = HTTPRangeHandler() opener = build_opener(handler) offset = 0 content_length = None accept_ranges = False while True: req = Request(uri) if offset: req.add_header('Range', 'bytes=%d-' % offset) src_file = None try: src_file = opener.open(req) headers = src_file.info() if not offset: # on first connection check server capabilities if 'Content-Length' in headers: content_length = int(headers['Content-Length']) if 'Accept-Ranges' in headers: accept_ranges = headers['Accept-Ranges'] != 'none' else: # on resume verify that server understood range header and responded accordingly if 'Content-Range' not in headers: raise IOError('Download aborted and server does not support resuming download') if int(headers['Content-Range'][len('bytes '):].split('-')[0]) != offset: raise IOError('Download aborted because server replied with different content range then requested') sys.stdout.write(' resume from %d...' 
% offset) sys.stdout.flush() with open(dest, 'ab' if offset else 'wb') as dst_file: progress = False while True: data = src_file.read(8192) if not data: break progress = True dst_file.write(data) offset += len(data) if not progress: # if no bytes have been received abort download raise IOError("No progress when trying to download '%s'" % uri) except Exception: if src_file: src_file.close() raise # when content length is unknown it is assumed that the download is complete if content_length is None: break # or when enough data has been downloaded (> is especially a valid case) if offset >= content_length: break if not accept_ranges: raise IOError('Server does not accept ranges to resume download') def download_md5(uri, dest): """Download file from uri to file dest.""" # Create intermediate directories as necessary, #2970 dirname = os.path.dirname(dest) if len(dirname): try: os.makedirs(dirname) except OSError as e: if e.errno != errno.EEXIST: raise sys.stdout.write('Downloading %s to %s...' % (uri, dest)) sys.stdout.flush() try: download_with_resume(uri, dest) sys.stdout.write(' done.\n') except Exception as e: # delete partially downloaded data if os.path.exists(dest): os.unlink(dest) sys.stdout.write(' failed (%s)!\n' % e) raise def checkmd5(dest, md5sum=None): """ Check file at dest against md5. :returns (boolean, hexdigest): True if dest contents matches md5sum """ if not os.path.exists(dest): return False, 'null' with open(dest, 'rb') as f: md5value = hashlib.md5() while True: buf = f.read(4096) if not buf: break md5value.update(buf) hexdigest = md5value.hexdigest() print('Checking md5sum on %s' % (dest)) return hexdigest == md5sum, hexdigest def main(argv=sys.argv[1:]): """Dowloads URI to file dest and checks md5 if given.""" parser = ArgumentParser(description='Dowloads URI to file dest. If md5sum is given, checks md5sum. 
If file existed and mismatch, downloads and checks again') parser.add_argument('uri') parser.add_argument('dest') parser.add_argument('md5sum', nargs='?') parser.add_argument('--ignore-error', action='store_true', help='Ignore download errors') args = parser.parse_args(argv) uri = args.uri if '://' not in uri: uri = 'file://' + uri fresh = False if not os.path.exists(args.dest): try: download_md5(uri, args.dest) except Exception: if args.ignore_error: return 0 raise fresh = True if args.md5sum: result, hexdigest = checkmd5(args.dest, args.md5sum) if result is False and fresh is False: print('WARNING: md5sum mismatch (%s != %s); re-downloading file %s' % (hexdigest, args.md5sum, args.dest)) os.remove(args.dest) try: download_md5(uri, args.dest) except Exception: if args.ignore_error: return 0 raise result, hexdigest = checkmd5(args.dest, args.md5sum) if result is False: return 'ERROR: md5sum mismatch (%s != %s) on %s; aborting' % (hexdigest, args.md5sum, args.dest) return 0 if __name__ == '__main__': sys.exit(main())
bsd-3-clause
-8,443,107,470,869,263,000
32.760234
161
0.57076
false
4.183333
false
false
false
pfmoore/invoke
invoke/parser/context.py
1
9145
import itertools from ..vendor.lexicon import Lexicon from .argument import Argument def translate_underscores(name): return name.lstrip('_').rstrip('_').replace('_', '-') def to_flag(name): name = translate_underscores(name) if len(name) == 1: return '-' + name return '--' + name def sort_candidate(arg): names = arg.names # TODO: is there no "split into two buckets on predicate" builtin? shorts = set(x for x in names if len(x.strip('-')) == 1) longs = set(x for x in names if x not in shorts) return sorted(shorts if shorts else longs)[0] def flag_key(x): """ Obtain useful key list-of-ints for sorting CLI flags. """ # Setup ret = [] x = sort_candidate(x) # Long-style flags win over short-style ones, so the first item of # comparison is simply whether the flag is a single character long (with # non-length-1 flags coming "first" [lower number]) ret.append(1 if len(x) == 1 else 0) # Next item of comparison is simply the strings themselves, # case-insensitive. They will compare alphabetically if compared at this # stage. ret.append(x.lower()) # Finally, if the case-insensitive test also matched, compare # case-sensitive, but inverse (with lowercase letters coming first) inversed = '' for char in x: inversed += char.lower() if char.isupper() else char.upper() ret.append(inversed) return ret # Named slightly more verbose so Sphinx references can be unambiguous. # Got real sick of fully qualified paths. class ParserContext(object): """ Parsing context with knowledge of flags & their format. Generally associated with the core program or a task. When run through a parser, will also hold runtime values filled in by the parser. """ def __init__(self, name=None, aliases=(), args=()): """ Create a new ``ParserContext`` named ``name``, with ``aliases``. ``name`` is optional, and should be a string if given. It's used to tell ParserContext objects apart, and for use in a Parser when determining what chunk of input might belong to a given ParserContext. 
``aliases`` is also optional and should be an iterable containing strings. Parsing will honor any aliases when trying to "find" a given context in its input. May give one or more ``args``, which is a quick alternative to calling ``for arg in args: self.add_arg(arg)`` after initialization. """ self.args = Lexicon() self.positional_args = [] self.flags = Lexicon() self.inverse_flags = {} # No need for Lexicon here self.name = name self.aliases = aliases for arg in args: self.add_arg(arg) def __str__(self): aliases = "" if self.aliases: aliases = " ({0})".format(', '.join(self.aliases)) name = (" {0!r}{1}".format(self.name, aliases)) if self.name else "" args = (": {0!r}".format(self.args)) if self.args else "" return "<parser/Context{0}{1}>".format(name, args) def __repr__(self): return str(self) def add_arg(self, *args, **kwargs): """ Adds given ``Argument`` (or constructor args for one) to this context. The Argument in question is added to the following dict attributes: * ``args``: "normal" access, i.e. the given names are directly exposed as keys. * ``flags``: "flaglike" access, i.e. the given names are translated into CLI flags, e.g. ``"foo"`` is accessible via ``flags['--foo']``. * ``inverse_flags``: similar to ``flags`` but containing only the "inverse" versions of boolean flags which default to True. This allows the parser to track e.g. ``--no-myflag`` and turn it into a False value for the ``myflag`` Argument. """ # Normalize if len(args) == 1 and isinstance(args[0], Argument): arg = args[0] else: arg = Argument(*args, **kwargs) # Uniqueness constraint: no name collisions for name in arg.names: if name in self.args: msg = "Tried to add an argument named {0!r} but one already exists!" 
# noqa raise ValueError(msg.format(name)) # First name used as "main" name for purposes of aliasing main = arg.names[0] # NOT arg.name self.args[main] = arg # Note positionals in distinct, ordered list attribute if arg.positional: self.positional_args.append(arg) # Add names & nicknames to flags, args self.flags[to_flag(main)] = arg for name in arg.nicknames: self.args.alias(name, to=main) self.flags.alias(to_flag(name), to=to_flag(main)) # Add attr_name to args, but not flags if arg.attr_name: self.args.alias(arg.attr_name, to=main) # Add to inverse_flags if required if arg.kind == bool and arg.default is True: # Invert the 'main' flag name here, which will be a dashed version # of the primary argument name if underscore-to-dash transformation # occurred. inverse_name = to_flag("no-{0}".format(main)) self.inverse_flags[inverse_name] = to_flag(main) @property def needs_positional_arg(self): return any(x.value is None for x in self.positional_args) @property def as_kwargs(self): """ This context's arguments' values keyed by their ``.name`` attribute. Results in a dict suitable for use in Python contexts, where e.g. an arg named ``foo-bar`` becomes accessible as ``foo_bar``. """ ret = {} for arg in self.args.values(): ret[arg.name] = arg.value return ret def names_for(self, flag): # TODO: should probably be a method on Lexicon/AliasDict return list(set([flag] + self.flags.aliases_of(flag))) def help_for(self, flag): """ Return 2-tuple of ``(flag-spec, help-string)`` for given ``flag``. """ # Obtain arg obj if flag not in self.flags: err = "{0!r} is not a valid flag for this context! 
Valid flags are: {1!r}" # noqa raise ValueError(err.format(flag, self.flags.keys())) arg = self.flags[flag] # Determine expected value type, if any value = { str: 'STRING', }.get(arg.kind) # Format & go full_names = [] for name in self.names_for(flag): if value: # Short flags are -f VAL, long are --foo=VAL # When optional, also, -f [VAL] and --foo[=VAL] if len(name.strip('-')) == 1: value_ = ("[{0}]".format(value)) if arg.optional else value valuestr = " {0}".format(value_) else: valuestr = "={0}".format(value) if arg.optional: valuestr = "[{0}]".format(valuestr) else: # no value => boolean # check for inverse if name in self.inverse_flags.values(): name = "--[no-]{0}".format(name[2:]) valuestr = "" # Tack together full_names.append(name + valuestr) namestr = ", ".join(sorted(full_names, key=len)) helpstr = arg.help or "" return namestr, helpstr def help_tuples(self): """ Return sorted iterable of help tuples for all member Arguments. Sorts like so: * General sort is alphanumerically * Short flags win over long flags * Arguments with *only* long flags and *no* short flags will come first. * When an Argument has multiple long or short flags, it will sort using the most favorable (lowest alphabetically) candidate. This will result in a help list like so:: --alpha, --zeta # 'alpha' wins --beta -a, --query # short flag wins -b, --argh -c """ # TODO: argument/flag API must change :( # having to call to_flag on 1st name of an Argument is just dumb. # To pass in an Argument object to help_for may require moderate # changes? # Cast to list to ensure non-generator on Python 3. return list(map( lambda x: self.help_for(to_flag(x.name)), sorted(self.flags.values(), key=flag_key) )) def flag_names(self): """ Similar to `help_tuples` but returns flag names only, no helpstrs. Specifically, all flag names, flattened, in rough order. 
""" # Regular flag names flags = sorted(self.flags.values(), key=flag_key) names = [self.names_for(to_flag(x.name)) for x in flags] # Inverse flag names sold separately names.append(self.inverse_flags.keys()) return tuple(itertools.chain.from_iterable(names))
bsd-2-clause
-6,370,345,778,070,750,000
36.633745
93
0.580208
false
4.06806
false
false
false
facebook/fbthrift
thrift/lib/py/Thrift.py
1
11042
# Copyright (c) Facebook, Inc. and its affiliates. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import logging import six import sys import threading UEXW_MAX_LENGTH = 1024 class TType: STOP = 0 VOID = 1 BOOL = 2 BYTE = 3 I08 = 3 DOUBLE = 4 I16 = 6 I32 = 8 I64 = 10 STRING = 11 UTF7 = 11 STRUCT = 12 MAP = 13 SET = 14 LIST = 15 UTF8 = 16 UTF16 = 17 FLOAT = 19 class TMessageType: CALL = 1 REPLY = 2 EXCEPTION = 3 ONEWAY = 4 class TPriority: """ apache::thrift::concurrency::PRIORITY """ HIGH_IMPORTANT = 0 HIGH = 1 IMPORTANT = 2 NORMAL = 3 BEST_EFFORT = 4 N_PRIORITIES = 5 class TRequestContext: def __init__(self): self._headers = None def getHeaders(self): return self._headers def setHeaders(self, headers): self._headers = headers class TProcessorEventHandler: """Event handler for thrift processors""" # TODO: implement asyncComplete for Twisted def getHandlerContext(self, fn_name, server_context): """Called at the start of processing a handler method""" return None def preRead(self, handler_context, fn_name, args): """Called before the handler method's argument are read""" pass def postRead(self, handler_context, fn_name, args): """Called after the handler method's argument are read""" pass def preWrite(self, handler_context, fn_name, result): """Called before the handler method's results are written""" pass def postWrite(self, 
handler_context, fn_name, result): """Called after the handler method's results are written""" pass def handlerException(self, handler_context, fn_name, exception): """Called if (and only if) the handler threw an expected exception.""" pass def handlerError(self, handler_context, fn_name, exception): """Called if (and only if) the handler threw an unexpected exception. Note that this method is NOT called if the handler threw an exception that is declared in the thrift service specification""" logging.exception("Unexpected error in service handler " + fn_name + ":") class TServerInterface: def __init__(self): self._tl_request_context = threading.local() def setRequestContext(self, request_context): self._tl_request_context.ctx = request_context def getRequestContext(self): return self._tl_request_context.ctx class TProcessor: """Base class for processor, which works on two streams.""" def __init__(self): self._event_handler = TProcessorEventHandler() # null object handler self._handler = None self._processMap = {} self._priorityMap = {} def setEventHandler(self, event_handler): self._event_handler = event_handler def getEventHandler(self): return self._event_handler def process(self, iprot, oprot, server_context=None): pass def onewayMethods(self): return () def readMessageBegin(self, iprot): name, _, seqid = iprot.readMessageBegin() if six.PY3: name = name.decode('utf8') return name, seqid def skipMessageStruct(self, iprot): iprot.skip(TType.STRUCT) iprot.readMessageEnd() def doesKnowFunction(self, name): return name in self._processMap def callFunction(self, name, seqid, iprot, oprot, server_ctx): process_fn = self._processMap[name] return process_fn(self, seqid, iprot, oprot, server_ctx) def readArgs(self, iprot, handler_ctx, fn_name, argtype): args = argtype() self._event_handler.preRead(handler_ctx, fn_name, args) args.read(iprot) iprot.readMessageEnd() self._event_handler.postRead(handler_ctx, fn_name, args) return args def writeException(self, oprot, name, 
seqid, exc): oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid) exc.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() def get_priority(self, fname): return self._priorityMap.get(fname, TPriority.NORMAL) def _getReplyType(self, result): if isinstance(result, TApplicationException): return TMessageType.EXCEPTION return TMessageType.REPLY @staticmethod def _get_exception_from_thrift_result(result): """Returns the wrapped exception, if pressent. None if not. result is a generated *_result object. This object either has a 'success' field set indicating the call succeeded, or a field set indicating the exception thrown. """ fields = ( result.__dict__.keys() if hasattr(result, "__dict__") else result.__slots__ ) for field in fields: value = getattr(result, field) if value is None: continue elif field == 'success': return None else: return value return None def writeReply(self, oprot, handler_ctx, fn_name, seqid, result, server_ctx=None): self._event_handler.preWrite(handler_ctx, fn_name, result) reply_type = self._getReplyType(result) if server_ctx is not None and hasattr(server_ctx, 'context_data'): ex = (result if reply_type == TMessageType.EXCEPTION else self._get_exception_from_thrift_result(result)) if ex: server_ctx.context_data.setHeaderEx(ex.__class__.__name__) server_ctx.context_data.setHeaderExWhat(str(ex)[:UEXW_MAX_LENGTH]) try: oprot.writeMessageBegin(fn_name, reply_type, seqid) result.write(oprot) oprot.writeMessageEnd() oprot.trans.flush() except Exception as e: # Handle any thrift serialization exceptions # Transport is likely in a messed up state. Some data may already have # been written and it may not be possible to recover. Doing nothing # causes the client to wait until the request times out. 
Try to # close the connection to trigger a quicker failure on client side oprot.trans.close() # Let application know that there has been an exception self._event_handler.handlerError(handler_ctx, fn_name, e) # We raise the exception again to avoid any further processing raise finally: # Since we called preWrite, we should also call postWrite to # allow application to properly log their requests. self._event_handler.postWrite(handler_ctx, fn_name, result) class TException(Exception): """Base class for all thrift exceptions.""" # BaseException.message is deprecated in Python v[2.6,3.0) if (2, 6, 0) <= sys.version_info < (3, 0): def _get_message(self): return self._message def _set_message(self, message): self._message = message message = property(_get_message, _set_message) def __init__(self, message=None): Exception.__init__(self, message) self.message = message class TApplicationException(TException): """Application level thrift exceptions.""" UNKNOWN = 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4 MISSING_RESULT = 5 INTERNAL_ERROR = 6 PROTOCOL_ERROR = 7 INVALID_TRANSFORM = 8 INVALID_PROTOCOL = 9 UNSUPPORTED_CLIENT_TYPE = 10 LOADSHEDDING = 11 TIMEOUT = 12 INJECTED_FAILURE = 13 EXTYPE_TO_STRING = { UNKNOWN_METHOD: 'Unknown method', INVALID_MESSAGE_TYPE: 'Invalid message type', WRONG_METHOD_NAME: 'Wrong method name', BAD_SEQUENCE_ID: 'Bad sequence ID', MISSING_RESULT: 'Missing result', INTERNAL_ERROR: 'Internal error', PROTOCOL_ERROR: 'Protocol error', INVALID_TRANSFORM: 'Invalid transform', INVALID_PROTOCOL: 'Invalid protocol', UNSUPPORTED_CLIENT_TYPE: 'Unsupported client type', LOADSHEDDING: 'Loadshedding request', TIMEOUT: 'Task timeout', INJECTED_FAILURE: 'Injected Failure', } def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type = type def __str__(self): if self.message: return self.message else: return self.EXTYPE_TO_STRING.get( self.type, 'Default (unknown) TApplicationException') 
def read(self, iprot): iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: message = iprot.readString() if sys.version_info.major >= 3 and isinstance(message, bytes): try: message = message.decode('utf-8') except UnicodeDecodeError: pass self.message = message else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.type = iprot.readI32() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin(b'TApplicationException') if self.message is not None: oprot.writeFieldBegin(b'message', TType.STRING, 1) oprot.writeString(self.message.encode('utf-8') if not isinstance(self.message, bytes) else self.message) oprot.writeFieldEnd() if self.type is not None: oprot.writeFieldBegin(b'type', TType.I32, 2) oprot.writeI32(self.type) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() class UnimplementedTypedef: pass
apache-2.0
8,498,865,374,388,009,000
29.929972
86
0.602427
false
4.366153
false
false
false
yavuzovski/playground
python/Udacity/cs215/find_eulerian_tour.py
1
1219
def find_eulerian_tour(graph): # find the node with biggest degree biggest_degree, biggest_node = 0, None for i, node in enumerate(graph): for e in node: count = 0 outer_graph = graph[:] for inner_node in outer_graph: if e in inner_node: count += 1 if count > biggest_degree: biggest_degree = count biggest_node = e # set the starting point result = [] for i, node in enumerate(graph): if biggest_node == node[0]: result = [node[0], node[1]] current_node = node[1] graph.pop(i) break # find the eulerian tour i = 0 while i < len(graph): if current_node == graph[i][0] or current_node == graph[i][1]: current_node = (graph[i][1] if current_node == graph[i][0] else graph[i][0]) result.append(current_node) graph.pop(i) i = 0 else: i += 1 return result print(find_eulerian_tour( [ (0, 1), (1, 5), (1, 7), (4, 5), (4, 8), (1, 6), (3, 7), (5, 9), (2, 4), (0, 4), (2, 5), (3, 6), (8, 9) ] ))
gpl-3.0
1,573,255,959,492,710,100
27.348837
88
0.464315
false
3.405028
false
false
false
diplomacy/research
diplomacy_research/models/layers/noisy_networks.py
1
4039
# ============================================================================== # Copyright 2019 - Philip Paquette # # NOTICE: Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # ============================================================================== """ Noisy Networks - Converts variables in a graph to their noisy equivalent """ from math import sqrt import sys assert 'tensorflow' in sys.modules, 'You need to import TF before importing this module.' from diplomacy_research.utils.tensorflow import tf from diplomacy_research.utils.tensorflow import graph_editor def convert_to_noisy_variables(variables, activation=None): """ Converts a list of variables to noisy variables :param variables: A list of variables to make noisy :param activation: Optional. 
The activation function to use on the linear noisy transformation :return: Nothing, but modifies the graph in-place Reference: 1706.10295 - Noisy Networks for exploration """ if tf.get_collection(tf.GraphKeys.TRAIN_OP): raise RuntimeError('You must call convert_to_noisy_variables before applying an optimizer on the graph.') graph = tf.get_default_graph() if not isinstance(variables, list): variables = list(variables) # Replacing each variable for variable in variables: variable_read_op = _get_variable_read_op(variable, graph) variable_outputs = _get_variable_outputs(variable_read_op, graph) variable_scope = variable.name.split(':')[0] variable_shape = variable.shape.as_list() fan_in = variable_shape[0] # Creating noisy variables with tf.variable_scope(variable_scope + '_noisy'): with tf.device(variable.device): s_init = tf.constant_initializer(0.5 / sqrt(fan_in)) noisy_u = tf.identity(variable, name='mu') noisy_s = tf.get_variable(name='sigma', shape=variable.shape, dtype=tf.float32, initializer=s_init, caching_device=variable._caching_device) # pylint: disable=protected-access noise = tf.random.normal(shape=variable_shape) replaced_var = noisy_u + noisy_s * noise replaced_var = activation(replaced_var) if activation else replaced_var # Replacing in-place inputs_index = [var_index for var_index, var_input in enumerate(graph_editor.sgv(*variable_outputs).inputs) if var_input.name.split(':')[0] == variable_read_op.name.split(':')[0]] graph_editor.connect(graph_editor.sgv(replaced_var.op), graph_editor.sgv(*variable_outputs).remap_inputs(inputs_index), disconnect_first=True) def _get_variable_read_op(variable, graph): """ Returns the /read operation for a variable """ return graph.get_operation_by_name(variable.name.split(':')[0] + '/read') def _get_variable_outputs(variable_read_op, graph): """ Returns the list of tensors that have the variable as input """ outputs = [] for graph_op in graph.get_operations(): for var_input in graph_op.inputs._inputs: # pylint: 
disable=protected-access if var_input in variable_read_op.outputs: outputs += [graph_op] return outputs
mit
3,073,660,035,282,352,000
48.256098
122
0.615499
false
4.428728
false
false
false
plang85/rough_surfaces
rough_surfaces/surface.py
1
2746
import numpy as np class Surface(np.ndarray): """ One- or two-dimensional surface height representation. The assumption upon which this framework is based is a uniform lattice size in both directions. This is tightly integrated here. 'Surface' is the fundamental class that most modules build upon. It usually represents the model or computational domain, as it may discretize either, individual and composite surfaces, i.e., rough surfaces and aperture fields. Standard initialization is from two-dimensional ndarray and lattice size: >>> import numpy as np >>> N, dxy = 100, 0.1 >>> h = np.zeros((N,N)) >>> s = Surface(h, dxy) >>> length(s) # egde length in x-direction 10.0 >>> length(s, 1) # egde length in y-direction 10.0 Surfaces can also be one-dimensional, e.g., represent traces or cross-sections: >>> import numpy as np >>> N, dxy = 100, 0.1 >>> h = np.zeros((N)) >>> s = Surface(h, dxy) >>> length(s) # length 10.0 >>> length(s, 1) # there is no second axis for one-dimensional surfaces Traceback (most recent call last): ... 
IndexError: tuple index out of range """ def __new__(cls, input_array, dxy): obj = np.asarray(input_array).view(cls) obj.dxy = float(dxy) return obj def __array_finalize__(self, obj): if obj is None: self.dxy = getattr(obj, 'dxy', None) def rms(surface): """"Returns root-mean-square roughness [L].""" return np.sqrt(np.mean(surface**2)) def length(surface, axis=0): """"Returns length [L] of surface in x- or y-direction, for axis=0 and 1, respectively.""" return surface.shape[axis] * surface.dxy def nominal_area(surface): """"Returns length() [L] for 1D, area [L^2] for 2D.""" a = 1.0 for i in range(len(surface.shape)): a *= length(surface) return a def shift_to_zero_mean(surface): """"Returns shifted surface such that <h> = 0.""" return Surface(surface - np.mean(surface), surface.dxy) def mean_aperture(surface): """"Composite surface assumption: mean of difference field to highest point.""" return np.mean(np.abs(np.subtract(surface, np.max(surface)))) def pore_volume(surface): """"Composite surface assumption: mean aperture times area (2D-->[L^3]) or length (1D-->[L^2]).""" return mean_aperture(surface) * nominal_area(surface) def scale_to_rms(surface, rms_target): """ Scales height to fit target property, which must be name of scalar returning method. """ rms_current = rms(surface) return Surface(surface * (rms_target / rms_current), surface.dxy) if __name__ == '__main__': import doctest doctest.testmod()
mit
1,258,035,149,484,728,000
29.853933
102
0.639476
false
3.547804
false
false
false
cleemesser/eeg-hdfstorage
scripts/edf2eeghdf.py
1
51534
# -*- coding: utf-8 -*- from __future__ import division, absolute_import, print_function # py2.6 with_statement import sys import pprint import h5py import numpy as np import os.path # date related stuff import datetime import dateutil import dateutil.tz import dateutil.parser import arrow # compatibility import future from future.utils import iteritems from builtins import range # range and switch xrange -> range # from past.builtins import xrange # later, move to from builtins import import edflib import eeghdf # really need to check the original data type and then save as that datatype along with the necessary conversion factors # so can convert voltages on own # try with float32 instead? # LPCH often uses these labels for electrodes LPCH_COMMON_1020_LABELS = [ 'Fp1', 'Fp2', 'F3', 'F4', 'C3', 'C4', 'P3', 'P4', 'O1', 'O2', 'F7', 'F8', 'T3', 'T4', 'T5', 'T6', 'Fz', 'Cz', 'Pz', 'E', 'PG1', 'PG2', 'A1', 'A2', 'T1', 'T2', 'X1', 'X2', 'X3', 'X4', 'X5', 'X6', 'X7', 'EEG Mark1', 'EEG Mark2', 'Events/Markers'] # common 10-20 extended clinical (T1/T2 instead of FT9/FT10) # will need to specify these as bytes I suppose (or is this ok in utf-8 given the ascii basis) # keys should be all one case (say upper) lpch2edf_fixed_len_labels = dict( FP1='EEG Fp1 ', F7='EEG F7 ', T3='EEG T3 ', T5='EEG T5 ', O1='EEG O1 ', F3='EEG F3 ', C3='EEG C3 ', P3='EEG P3 ', FP2='EEG Fp2 ', F8='EEG F8 ', T4='EEG T4 ', T6='EEG T6 ', O2='EEG O2 ', F4='EEG F4 ', C4='EEG C4 ', P4='EEG P4 ', CZ='EEG Cz ', FZ='EEG Fz ', PZ='EEG Pz ', T1='EEG FT9 ', # maybe I should map this to FT9/T1 T2='EEG FT10 ', # maybe I should map this to FT10/T2 A1='EEG A1 ', A2='EEG A2 ', # these are often (?always) EKG at LPCH, note edfspec says use ECG instead # of EKG X1='ECG X1 ', # is this invariant? 
usually referenced to A1 # this is sometimes ECG but not usually (depends on how squirmy) X2='X2 ', PG1='EEG Pg1 ', PG2='EEG Pg2 ', # now the uncommon ones NZ='EEG Nz ', FPZ='EEG Fpz ', AF7='EEG AF7 ', AF8='EEG AF8 ', AF3='EEG AF3 ', AFz='EEG AFz ', AF4='EEG AF4 ', F9='EEG F9 ', # F7 F5='EEG F5 ', # F3 ='EEG F3 ', F1='EEG F1 ', # Fz F2='EEG F2 ', # F4 F6='EEG F6 ', # F8 F10='EEG F10 ', FT9='EEG FT9 ', FT7='EEG FT7 ', FC5='EEG FC5 ', FC3='EEG FC3 ', FC1='EEG FC1 ', FCz='EEG FCz ', FC2='EEG FC2 ', FC4='EEG FC4 ', FC6='EEG FC6 ', FT8='EEG FT8 ', FT10='EEG FT10 ', T9='EEG T9 ', T7='EEG T7 ', C5='EEG C5 ', # C3 above C1='EEG C1 ', # Cz above C2='EEG C2 ', # C4 ='EEG C4 ', C6='EEG C6 ', T8='EEG T8 ', T10='EEG T10 ', # A2 # T3 # T4 # T5 # T6 TP9='EEG TP9 ', TP7='EEG TP7 ', CP5='EEG CP5 ', CP3='EEG CP3 ', CP1='EEG CP1 ', CPZ='EEG CPz ', CP2='EEG CP2 ', CP4='EEG CP4 ', CP6='EEG CP6 ', TP8='EEG TP8 ', TP10='EEG TP10 ', P9='EEG P9 ', P7='EEG P7 ', P5='EEG P5 ', # P3 P1='EEG P1 ', # Pz P2='EEG P2 ', # P4 P6='EEG P6 ', P8='EEG P8 ', P10='EEG P10 ', PO7='EEG PO7 ', PO3='EEG PO3 ', POZ='EEG POz ', PO4='EEG PO4 ', PO8='EEG PO8 ', # O1 OZ='EEG Oz ', # O2 IZ='EEG Iz ', ) lpch2edf_fixed_len_labels # print("lpch2edf_fixed_len_labels::\n") # pprint.pprint(lpch2edf_fixed_len_labels) LPCH_TO_STD_LABELS_STRIP = {k: v.strip() for k, v in iteritems(lpch2edf_fixed_len_labels)} # print('LPCH_TO_STD_LABELS_STRIP::\n') # pprint.pprint(LPCH_TO_STD_LABELS_STRIP) LPCH_COMMON_1020_LABELS_to_EDF_STANDARD = { } def normalize_lpch_signal_label(label): uplabel = label.upper() if uplabel in LPCH_TO_STD_LABELS_STRIP: return LPCH_TO_STD_LABELS_STRIP[uplabel] else: return label def edf2h5_float32(fn, outfn='', hdf_dir='', anonymous=False): """ convert an edf file to hdf5 using a straighforward mapping convert to real-valued signals store as float32's justing getting started here --- metadata --- number_signals sample_frequency nsamples age signal_labels Post Menstrual Age """ if not outfn: base = 
os.path.basename(fn) base, ext = os.path.splitext(base) base = base + '.eeghdf5' outfn = os.path.join(hdf_dir, base) print('outfn:', outfn) # outfn = fn+'.eeg.h5' with edflib.EdfReader(fn) as ef: nsigs = ef.signals_in_file # again know/assume that this is uniform sampling across signals fs = [ef.samplefrequency(ii) for ii in range(nsigs)] fs0 = fs[0] if any([ fs0 != xx for xx in fs]): print("caught multiple sampling frquencies in edf files!!!") sys.exit(0) nsamples0 = ef.samples_in_file(0) print('nsigs=%s, fs0=%s, nsamples0=%s' % (nsigs, fs0, nsamples0)) # create file 'w-' -> fail if exists , w -> truncate if exists hdf = h5py.File(outfn, 'w') # use compression? yes! give it a try eegdata = hdf.create_dataset('eeg', (nsigs, nsamples0), dtype='float32', # chunks=(nsigs,fs0), chunks=True, fletcher32=True, # compression='gzip', # compression='lzf', # maxshape=(256,None) ) # no compression -> 50 MiB can view eegdata in vitables # compression='gzip' -> 27 MiB slower # compression='lzf' -> 35 MiB # compression='lzf' maxshape=(256,None) -> 36MiB # szip is unavailable patient = hdf.create_group('patient') # add meta data hdf.attrs['number_signals'] = nsigs hdf.attrs['sample_frequency'] = fs0 hdf.attrs['nsamples0'] = nsamples0 patient.attrs['gender_b'] = ef.gender_b patient.attrs['patientname'] = ef.patient_name # PHI print('birthdate: %s' % ef.birthdate_b, type(ef.birthdate_b)) # this is a string -> date (datetime) if not ef.birthdate_b: print("no birthday in this file") birthdate = None else: birthdate = dateutil.parser.parse(ef.birthdate_b) print('birthdate (date object):', birthdate_b) start_date_time = datetime.datetime( ef.startdate_year, ef.startdate_month, ef.startdate_day, ef.starttime_hour, ef.starttime_minute, ef.starttime_second) # ,tzinfo=dateutil.tz.tzlocal()) print(start_date_time) if start_date_time and birthdate: age = start_date_time - birthdate print('age:', age) else: age = None if age: patient.attrs['post_natal_age_days'] = age.days else: 
patient.attrs['post_natal_age_days'] = -1 # now start storing the lists of things: labels, units... # nsigs = len(label_list) # variable ascii string (or b'' type) str_dt = h5py.special_dtype(vlen=str) label_ds = hdf.create_dataset('signal_labels', (nsigs,), dtype=str_dt) units_ds = hdf.create_dataset('signal_units', (nsigs,), dtype=str_dt) labels = [] units = list() # signal_nsamples = [] for ii in range(nsigs): labels.append(ef.signal_label(ii)) units.append(ef.physical_dimension(ii)) # self.signal_nsamples.append(self.cedf.samples_in_file(ii)) # self.samplefreqs.append(self.cedf.samplefrequency(ii)) # eegdata.signal_labels = labels # labels are fixed length strings labels_strip = [ss.strip() for ss in labels] label_ds[:] = labels_strip units_ds[:] = units # should be more and a switch for anonymous or not # need to change this to nchunks = int(nsamples0 // fs0) samples_per_chunk = int(fs0) buf = np.zeros((nsigs, samples_per_chunk), dtype='float64') # buffer is float64_t print('nchunks: ', nchunks, 'samples_per_chunk:', samples_per_chunk) bookmark = 0 # mark where were are in samples for ii in range(nchunks): for jj in range(nsigs): # readsignal(self, signalnum, start, n, # np.ndarray[np.float64_t, ndim = 1] sigbuf) # read_phys_signal(chn, 0, nsamples[chn], v) #read_phys_signal(self, signalnum, start, n, np.ndarray[np.float64_t, ndim=1] sigbuf) print(ii,jj) ef.read_phys_signal(jj, bookmark, samples_per_chunk, buf[jj]) # readsignal converts into float # conversion from float64 to float32 eegdata[:, bookmark:bookmark + samples_per_chunk] = buf # bookmark should be ii*fs0 bookmark += samples_per_chunk left_over_samples = nsamples0 - nchunks * samples_per_chunk print('left_over_samples:', left_over_samples) if left_over_samples > 0: for jj in range(nsigs): ef.read_phys_signal(jj, bookmark, left_over_samples, buf[jj]) eegdata[:, bookmark:bookmark + left_over_samples] = buf[:, 0:left_over_samples] hdf.close() def edf_block_iter_generator( edf_file, nsamples, 
samples_per_chunk, dtype='int32'): """ factory to produce generators for iterating through an edf file and filling up an array from the edf with the signal data starting at 0. You choose the number of @samples_per_chunk, and number of samples to do in total @nsamples as well as the dtype. 'int16' is reasonable as well 'int32' will handle everything though it yields -> (numpy_buffer, mark, num) numpy_buffer, mark, which is where in the file in total currently reading from num -- which is the number of samples in the buffer (per signal) to transfer """ nchan = edf_file.signals_in_file # 'int32' will work for int16 as well buf = np.zeros((nchan, samples_per_chunk), dtype=dtype) nchunks = nsamples // samples_per_chunk left_over_samples = nsamples - nchunks * samples_per_chunk mark = 0 for ii in range(nchunks): for cc in range(nchan): edf_file.read_digital_signal(cc, mark, samples_per_chunk, buf[cc]) yield (buf, mark, samples_per_chunk) mark += samples_per_chunk # print('mark:', mark) # left overs if left_over_samples > 0: for cc in range(nchan): edf_file.read_digital_signal(cc, mark, left_over_samples, buf[cc]) yield (buf[:, 0:left_over_samples], mark, left_over_samples) def dig2phys(eeghdf, start, end, chstart, chend): # edfhdr->edfparam[i].bitvalue = (edfhdr->edfparam[i].phys_max - edfhdr->edfparam[i].phys_min) / (edfhdr->edfparam[i].dig_max - edfhdr->edfparam[i].dig_min); # edfhdr->edfparam[i].offset = edfhdr->edfparam[i].phys_max / # edfhdr->edfparam[i].bitvalue - edfhdr->edfparam[i].dig_max; dmins = eeghdf['signal_digital_mins'][:] dmaxs = eeghdf['signal_digital_maxs'][:] phys_maxs = eeghdf['signal_physical_maxs'][:] phys_mins = eeghdf['signal_physical_mins'][:] print('dmaxs:', repr(dmaxs)) print('dmins:', repr(dmins)) print('dmaxs[:] - dmins[:]', dmaxs - dmins) print('phys_maxs', phys_maxs) print('phys_mins', phys_mins) bitvalues = (phys_maxs - phys_mins) / (dmaxs - dmins) offsets = phys_maxs / bitvalues - dmaxs print('bitvalues, offsets:', bitvalues, offsets) 
print('now change their shape to column vectors') for arr in (bitvalues, offsets): if len(arr.shape) != 1: print('logical errror %s shape is unexpected' % arr.shape) raise Exception s = arr.shape arr.shape = (s[0], 1) print('bitvalues, offsets:', bitvalues, offsets) # buf[i] = phys_bitvalue * (phys_offset + (double)var.two_signed[0]); dig_signal = eeghdf['signals'][chstart:chend, start:end] # signal = bitvalues[chstart:chend] *(dig_signal[chstart:chend,:] + offsets[chstart:chend]) phys_signals = (dig_signal[:, start:end] + offsets) * bitvalues # return signal, bitvalues, offsets return phys_signals # TODO: create edf -> hdf version 1000 # hdf -> edf for hdf version 1000 # tests to verify that round trip is lossless # [] writing encoding of MRN # [] and entry of mapped pt_code into database coe def edf2hdf_oldhack(fn, outfn='', hdf_dir='', anonymous=False): """ convert an edf file to hdf5 using a straighforward mapping justing getting started here --- metadata --- number_signals sample_frequency nsamples age signal_labels Post Menstrual Age """ if not outfn: base = os.path.basename(fn) base, ext = os.path.splitext(base) base = base + '.eeg.hdf' outfn = os.path.join(hdf_dir, base) print('outfn:', outfn) # outfn = fn+'.eeg.h5' with edflib.EdfReader(fn) as ef: # all the data point related stuff nsigs = ef.signals_in_file # again know/assume that this is uniform sampling across signals fs = [ef.samplefrequency(ii) for ii in range(nsigs)] fs0 = fs[0] print([ fs0 != xx for xx in fs]) if any([ fs0 != xx for xx in fs]): print("caught multiple sampling frquencies in edf files!!!") sys.exit(0) nsamples0 = ef.samples_in_file(0) print('nsigs=%s, fs0=%s, nsamples0=%s\n' % (nsigs, fs0, nsamples0)) num_samples_per_signal = ef.get_samples_per_signal() # np array print("num_samples_per_signal::\n", repr(num_samples_per_signal), '\n') file_duration_sec = ef.file_duration_seconds print("file_duration_sec", repr(file_duration_sec)) signal_frequency_array = ef.get_signal_freqs() 
print("signal_frequency_array::\n", repr(signal_frequency_array)) # Note that all annotations except the top row must also specify a duration. # long long onset; /* onset time of the event, expressed in units of 100 nanoSeconds and relative to the starttime in the header */ # char duration[16]; /* duration time, this is a null-terminated ASCII text-string */ # char annotation[EDFLIB_MAX_ANNOTATION_LEN + 1]; /* description of the event in UTF-8, this is a null term string of max length 512 # start("x.y"), end, char[20] # annotations = ef.read_annotations_as_array() # get numpy array of # annotations annotations = ef.read_annotations_100ns_units() #print("annotations::\n") #pprint.pprint(annotations) # get list of annotations signal_text_labels = ef.get_signal_text_labels() #print("signal_text_labels::\n") #pprint.pprint(signal_text_labels) #print("normalized text labels::\n") signal_text_labels_lpch_normalized = [ normalize_lpch_signal_label(label) for label in signal_text_labels] #pprint.pprint(signal_text_labels_lpch_normalized) # ef.recording_additional # print() signal_digital_mins = np.array( [ef.digital_min(ch) for ch in range(nsigs)]) signal_digital_total_min = min(signal_digital_mins) print("digital mins:", repr(signal_digital_mins)) print("digital total min:", repr(signal_digital_total_min)) signal_digital_maxs = np.array( [ef.digital_max(ch) for ch in range(nsigs)]) signal_digital_total_max = max(signal_digital_maxs) print("digital maxs:", repr(signal_digital_maxs)) print("digital total max:", repr(signal_digital_total_max)) signal_physical_dims = [ ef.physical_dimension(ch) for ch in range(nsigs)] print('signal_physical_dims::\n') pprint.pprint(signal_physical_dims) print() signal_physical_maxs = np.array( [ef.physical_max(ch) for ch in range(nsigs)]) print('signal_physical_maxs::\n', repr(signal_physical_maxs)) signal_physical_mins = np.array( [ef.physical_min(ch) for ch in range(nsigs)]) print('signal_physical_mins::\n', repr(signal_physical_mins)) 
print('gender:', repr(ef.gender_b)) print('admincode:', repr(ef.admincode)) print('birthdate:', repr(ef.birthdate_b)) # this is a string birthdate = dateutil.parser.parse(ef.birthdate_b) print('birthdate as datetime:', birthdate) print('equipment:', repr(ef.equipment)) print('patient:', repr(ef.patient)) print('patientname:', repr(ef.patient_name)) print('patientcode:', repr(ef.patientcode)) print('patient_additional:', repr(ef.patient_additional)) print('recording_additional:', repr(ef.recording_additional)) print('technician:', repr(ef.technician)) # or use arrow start_date_time = datetime.datetime( ef.startdate_year, ef.startdate_month, ef.startdate_day, ef.starttime_hour, ef.starttime_minute, ef.starttime_second) # tz naive # end_date_time = datetime.datetime(ef.enddate_year, ef.enddate_month, ef.enddate_day, ef.endtime_hour, # ef.endtime_minute, ef.endtime_second) # tz naive # end_date_time - start_date_time duration = datetime.timedelta(seconds=ef.file_duration_seconds) print('start_date_time:', start_date_time) age = arrow.get(start_date_time) - arrow.get(birthdate) # age = arrow.get(agedt) print('predicted age:', age) # total_seconds() returns a float print('predicted age (seconds):', age.total_seconds()) print() # this don't seem to be used much so I will put at end signal_prefilters = [ef.prefilter(ch) for ch in range(nsigs)] print('signal_prefilters::\n') pprint.pprint(signal_prefilters) print() signal_transducer = [ef.transducer(ch) for ch in range(nsigs)] print('signal_transducer::\n') pprint.pprint(signal_transducer) # now start building the hdf file # create file 'w-' -> fail if exists , w -> truncate if exists hdf = h5py.File(outfn, 'w') # use compression? yes! 
give it a try # integer increasing starting at 1000 semantic change at each thousand hdf.attrs['eeghdf_version'] = 1000 hdf.attrs['signals_in_file'] = nsigs hdf.attrs['sample_frequency0'] = fs0 hdf.attrs['nsamples0'] = nsamples0 sample_frequencies = hdf.create_dataset( 'sample_frequencies', (nsigs,), dtype='float32') sample_frequencies[:] = signal_frequency_array # add phys_bitvalue = .bitvalue, phys_offset = .offset # (double) phys_value = phys_bitvalue*(phys_offset + (double) var.two_signed[0]) # edfhdr->edfparam[i].bitvalue = (edfhdr->edfparam[i].phys_max - edfhdr->edfparam[i].phys_min) / (edfhdr->edfparam[i].dig_max - edfhdr->edfparam[i].dig_min); # edfhdr->edfparam[i].offset = edfhdr->edfparam[i].phys_max / # edfhdr->edfparam[i].bitvalue - edfhdr->edfparam[i].dig_max; # add meta data # start_date_time = datetime.datetime(ef.startdate_year, ef.startdate_month, ef.startdate_day, ef.starttime_hour, ef.starttime_minute, ef.starttime_second) # ,tzinfo=dateutil.tz.tzlocal()) print(start_date_time) patient = hdf.create_group('patient') patient.attrs['gender'] = ef.gender_b patient.attrs['patientname'] = "" # ef.patient_name # PHI print('birthdate: %s' % ef.birthdate_b, type(ef.birthdate_b)) default_birthdate = datetime.datetime(year=1990, month=1, day=1) # birthdate = dateutil.parser.parse(ef.birthdate) # this is a string # -> date (datetime) birthdate = default_birthdate print('birthdate (date object):', birthdate) private_start_date_time = birthdate + age patient.attrs['birthdate'] = str(birthdate) # float number age in seconds patient.attrs['age_seconds'] = age.total_seconds() # gestational age at birth (in seconds) # datetime.timedelta(weeks=40).total_seconds() # default 24192000 seconds or 40 weeks, 280 days # could also call this post-conceptional-age PCA patient.attrs['gestatational_age_birth_seconds'] = datetime.timedelta( weeks=40).total_seconds() patient.attrs['born_premature'] = 'unknown' # ('unknown', True, False) # hide actual start/end times question: 
should vary by year or just # make all the same hdf.attrs['startdatetime'] = str(private_start_date_time) hdf.attrs['enddatetime'] = str(private_start_date_time + duration) patient.attrs['age_days'] = age.days # post natal age in days patient.attrs['age_seconds'] = age.total_seconds() # now start storing the lists of things: labels, units... # nsigs = len(label_list) # 1. keep the text-vs-bytes distinction clear # 2. alays use "bytes" instead of "str" when you're sure you want a byte string. # for literals, can use "b" prefix, e.g. b'some bytes' # 3. for text strings use str or btter yet unicode, u'Hello' # 4. always use UTF-8 in code # variable ascii string (or b'' type) str_dt = h5py.special_dtype(vlen=bytes) label_ds = hdf.create_dataset('signal_labels', (nsigs,), dtype=str_dt) units_ds = hdf.create_dataset( 'physical_dimensions', (nsigs,), dtype=str_dt) transducer_ds = hdf.create_dataset( 'transducer', (nsigs,), dtype=str_dt) prefilter_ds = hdf.create_dataset('prefilter', (nsigs,), dtype=str_dt) hdf['signal_physical_mins'] = signal_physical_mins hdf['signal_physical_maxs'] = signal_physical_maxs hdf['signal_digital_mins'] = signal_digital_mins hdf['signal_digital_maxs'] = signal_digital_maxs if all(signal_digital_maxs <= 32767) and all( signal_digital_mins >= -32768): number_bits = 16 # EDF else: number_bits = 24 # BDF 2^23 = 8388608 + 1 bit for sign hdf.attrs['number_bits_per_sample'] = number_bits if number_bits <= 16: data_dtype = 'int16' eegdata = hdf.create_dataset('signals', (nsigs, nsamples0), dtype=data_dtype, # chunks=(nsigs,fs0), # if wanted 1 # second chunks chunks=True, fletcher32=True, compression='gzip' # most universal # compression='gzip', # compression='lzf', # maxshape=(256,None) ) if number_bits <= 32 and number_bits > 16: # handles up to 32 data_dtype = 'int32' eegdata = hdf.create_dataset('signals', (nsigs, nsamples0), dtype=data_dtype, # chunks=(nsigs,fs0), # if wanted 1 # second chunks chunks=True, fletcher32=True, compression='gzip' # 
most universal # compression='gzip', # compression='lzf', # maxshape=(256,None) ) # no compression -> 50 MiB can view eegdata in vitables # compression='gzip' -> 27 MiB slower # compression='lzf' -> 35 MiB # compression='lzf' maxshape=(256,None) -> 36MiB # this works but can do another way: # labels = [] units = list() # signal_nsamples = [] for ii in range(nsigs): # labels.append(ef.signal_label(ii)) units.append(ef.physical_dimension(ii)) # self.signal_nsamples.append(self.cedf.samples_in_file(ii)) # self.samplefreqs.append(self.cedf.samplefrequency(ii)) # eegdata.signal_labels = labels # labels_strip = [ss.strip() for ss in labels] # labels are fixed # length strings units = [cc.strip() for cc in units] # converted to standard electrode names if possible label_ds[:] = signal_text_labels_lpch_normalized units_ds[:] = units transducer_ds[:] = signal_transducer prefilter_ds[:] = signal_prefilters num_annot = len(annotations) # how do I make sure this init is "long long" enough edf_annots = hdf.create_group('edf_annotations') starts = edf_annots.create_dataset( 'starts_100ns', (num_annot,), dtype=np.int64) # curiously these durations seem to be stored as strings but of # floating point values "5.00000" for 5 second duration durations = edf_annots.create_dataset( 'durations_char16', (num_annot,), dtype='S16') # S16 !!! 
check py3 compatibility texts = edf_annots.create_dataset('texts', (num_annot,), dtype=str_dt) # start with a loop for ii in range(num_annot): starts[ii] = annotations[ii][0] # note: so far I have ony seen type(annotations[ii][1] -> <type 'str'> and they look like ascii strings # of floating point number of seconds for a duration # print('type(annotations[ii][1]):', type(annotations[ii][1])) durations[ii] = annotations[ii][1] texts[ii] = annotations[ii][2].strip() # should be more and a switch for anonymous or not # need to change this to nchunks = int(nsamples0 // fs0) samples_per_chunk = int(fs0) # 1 second of samples buf = np.zeros((nsigs, samples_per_chunk), dtype='int32') print( 'nchunks:%s, samples_per_chunk: %s' % (nchunks, samples_per_chunk)) bookmark = 0 # mark where were are in samples for ii in range(nchunks): for jj in range(nsigs): # read_phys_signal(self, signalnum, start, n, # np.ndarray[np.float64_t, ndim = 1] sigbuf) # readsignal converts into int32 as necessary ef.read_digital_signal( jj, bookmark, samples_per_chunk, buf[jj]) # conversion from int32 to int16 as necessary eegdata[:, bookmark:bookmark + samples_per_chunk] = buf # bookmark should be ii*fs0 bookmark += samples_per_chunk left_over_samples = nsamples0 - nchunks * samples_per_chunk print('left_over_samples:', left_over_samples) if left_over_samples > 0: for jj in range(nsigs): ef.read_digital_signal( jj, bookmark, left_over_samples, buf[jj]) eegdata[:,bookmark:bookmark + left_over_samples] = buf[:,0:left_over_samples] hdf.close() # from trackingdb.models.nkdb import find_lpch_birthday_from_mrn # Plan # v = ValidateTrackHeader(header=h) # if v.is_valid(): # process(v.cleaned_data) # else: # mark_as_invalid(h) def first(mapping): if mapping: return mapping[0] else: return mapping # say mapping = [] or None class ValidateTrackHeaderLPCH: # after validated place all data in cleaned_data field def __init__(self, header): # TOOO: validate that databae_source_label is in accepted sources 
self.hdr = header.copy() self.validated = False # self.clean = False self.cleaned_data = {} # vs update/copy from header def is_valid(self): # if name contains "Test" then we should skip this file and log it mrnobj = None try: if name_is_test(self.hdr['patient_name']): raise ValidationError('test file encountered', code='test file', params=self.hdr) # if we have a valid mrn, then we can potentially look up the patient or even the study mrn_ok = valid_lpch_mrn(self.hdr['patientcode']) if mrn_ok: mrn = self.hdr['patientcode'].strip() self.cleaned_data['patientcode'] = mrn else: raise ValidationError('bad MRN', code='bad mrn', params=self.hdr['patientcode']) if valid_lpch_name(self.hdr['patient_name']): self.cleaned_data['patient_name'] = self.hdr['patient_name'].strip() else: if mrn_ok: # try to look up patient in databases # look up name, dob here based upon mrn in nk_db and/or epic_db mrnobj = models.NkMrn.query.filter_by(mrn=mrn).first() if mrnobj: self.cleaned_data['patient_name'] = mrnobj.nkpatient.name else: raise ValidationError('invalid patient name', 'invalid name', params=self.hdr) eegno_ok = valid_lpch_eegno(self.hdr['admincode']) if eegno_ok: self.cleaned_data['admincode'] = _csu(self.hdr['admincode']) else: raise ValidationError('bad eegno/admincode', code='invalid admincode', params=self.hdr) if self.hdr['birthdate_date']: self.cleaned_data['birthdate_date'] = self.hdr['birthdate_date'] else: # then couldn't make a date, see if can find birthday in database if mrn_ok: mrnobj = mrnobj if mrnobj else models.NkMrn.query.filter_by(mrn=mrn).first() if not mrnobj: raise ValidationError('bad birthdate_date','birthdate error', params=self.hdr) else: nbday = mrnobj.nkpatient.dob self.cleaned_data['birthdate_date'] = nbday else: raise ValidationError('bad birthday','birthday error', params=self.hdr) # copy over other header members # todo: should do more validation of 'gender' self.cleaned_data['gender'] = self.hdr['gender'] self.cleaned_data['file_name'] = 
self.hdr['file_name'] self.cleaned_data['filetype'] = self.hdr['filetype'] self.cleaned_data['signals_in_file'] = self.hdr['signals_in_file'] self.cleaned_data['datarecords_in_file'] = self.hdr['datarecords_in_file'] self.cleaned_data['file_duration_100ns'] = self.hdr['file_duration_100ns'] self.cleaned_data['file_duration_seconds'] = self.hdr['file_duration_seconds'] self.cleaned_data['startdate_date'] = self.hdr['startdate_date'] self.cleaned_data['start_datetime'] = self.hdr['start_datetime'] self.cleaned_data['starttime_subsecond_offset'] = self.hdr['starttime_subsecond_offset'] self.cleaned_data['patient_additional'] = self.hdr['patient_additional'].strip() self.cleaned_data['technician'] = self.hdr['technician'].strip() self.cleaned_data['equipment'] = self.hdr['equipment'].strip() self.cleaned_data['recording_additional'] = self.hdr['recording_additional'].strip() self.cleaned_data['datarecord_duration_100ns'] = self.hdr['datarecord_duration_100ns'] self.validated = True return True except ValidationError as ve: self.errors = ve.message self.error_code = ve.code self.error_params = ve.params debug(ve.message) return False class AnonymizeTrackHeaderLPCH(ValidateTrackHeaderLPCH): LPCH_DEFAULT_BIRTH_DATETIME = datetime.datetime(year=1990, month=1, day=1) # datatbase sources LPCH_NK = 'LPCH_NK' STANFORD_NK = 'STANFORD_NK' def __init__(self, header, source_database_label=LPCH_NK): super().__init__(header) with app.app_context(): self.anonymous_header = models.register_and_create_anonymous_header(self.hdr, source_database_label=source_database_label) # will need to track: patient, study, file # file needs source and key NK origin class ValidateTrackHeaderStanford: # after validated place all data in cleaned_data field def __init__(self, header): # TOOO: validate that databae_source_label is in accepted sources self.hdr = header.copy() self.validated = False # self.clean = False self.cleaned_data = {} # vs update/copy from header def is_valid(self): # if name 
contains "Test" then we should skip this file and log it mrnobj = None try: if name_is_test(self.hdr['patient_name']): raise ValidationError('test file encountered', code='test file', params=self.hdr) # if we have a valid mrn, then we can potentially look up the patient or even the study mrn_ok = valid_stanford_mrn(self.hdr['patientcode']) if mrn_ok: mrn = self.hdr['patientcode'].strip() self.cleaned_data['patientcode'] = mrn else: raise ValidationError('bad MRN', code='bad mrn', params=self.hdr['patientcode']) if valid_stanford_name(self.hdr['patient_name']): self.cleaned_data['patient_name'] = self.hdr['patient_name'].strip() else: if mrn_ok: # try to look up patient in databases # look up name, dob here based upon mrn in nk_db and/or epic_db mrnobj = models.NkMrn.query.filter_by(mrn=mrn).first() if mrnobj: self.cleaned_data['patient_name'] = mrnobj.nkpatient.name else: raise ValidationError('invalid patient name', 'invalid name', params=self.hdr) eegno_ok = valid_stanford_eegno(self.hdr['admincode']) if eegno_ok: self.cleaned_data['admincode'] = _csu(self.hdr['admincode']) else: raise ValidationError('bad eegno/admincode', code='invalid admincode', params=self.hdr) if self.hdr['birthdate_date']: self.cleaned_data['birthdate_date'] = self.hdr['birthdate_date'] else: # then couldn't make a date, see if can find birthday in database if mrn_ok: mrnobj = mrnobj if mrnobj else models.NkMrn.query.filter_by(mrn=mrn).first() if not mrnobj: raise ValidationError('bad birthdate_date','birthdate error', params=self.hdr) else: nbday = mrnobj.nkpatient.dob self.cleaned_data['birthdate_date'] = nbday else: raise ValidationError('bad birthday','birthday error', params=self.hdr) # copy over other header members # todo: should do more validation of 'gender' self.cleaned_data['gender'] = self.hdr['gender'] self.cleaned_data['file_name'] = self.hdr['file_name'] self.cleaned_data['filetype'] = self.hdr['filetype'] self.cleaned_data['signals_in_file'] = self.hdr['signals_in_file'] 
self.cleaned_data['datarecords_in_file'] = self.hdr['datarecords_in_file'] self.cleaned_data['file_duration_100ns'] = self.hdr['file_duration_100ns'] self.cleaned_data['file_duration_seconds'] = self.hdr['file_duration_seconds'] self.cleaned_data['startdate_date'] = self.hdr['startdate_date'] self.cleaned_data['start_datetime'] = self.hdr['start_datetime'] self.cleaned_data['starttime_subsecond_offset'] = self.hdr['starttime_subsecond_offset'] self.cleaned_data['patient_additional'] = self.hdr['patient_additional'].strip() self.cleaned_data['technician'] = self.hdr['technician'].strip() self.cleaned_data['equipment'] = self.hdr['equipment'].strip() self.cleaned_data['recording_additional'] = self.hdr['recording_additional'].strip() self.cleaned_data['datarecord_duration_100ns'] = self.hdr['datarecord_duration_100ns'] self.validated = True return True except ValidationError as ve: self.errors = ve.message self.error_code = ve.code self.error_params = ve.params debug(ve.message) return False class AnonymizeTrackHeaderStanford(ValidateTrackHeaderStanford): STANFORD_DEFAULT_BIRTH_DATETIME = datetime.datetime(year=1910, month=1, day=1) # datatbase sources LPCH_NK = 'LPCH_NK' STANFORD_NK = 'STANFORD_NK' def __init__(self, header, source_database_label='STANFORD_NK'): super().__init__(header) with app.app_context(): self.anonymous_header = models.register_and_create_anonymous_header(self.hdr, source_database_label=source_database_label) # will need to track: patient, study, file # file needs source and key NK origin def find_blocks(arr): blocks = [] print("total arr:", arr) dfs = np.diff(arr) dfs_ind = np.where(dfs != 0.0)[0] last_ind = 0 for dd in dfs_ind+1: print("block:",arr[last_ind:dd]) blocks.append((last_ind,dd)) last_ind = dd print("last block:", arr[last_ind:]) blocks.append( (last_ind,len(arr))) return blocks def find_blocks2(arr): blocks = [] N = len(arr) print("total arr:", arr) last_ind = 0 last_val = arr[0] for ii in range(1,N): if last_val == arr[ii]: pass 
else: blocks.append((last_ind,ii)) last_ind = ii last_val = arr[ii] blocks.append((last_ind,N)) return blocks def test_find_blocks1(): s = [250.0, 250.0, 250.0, 1.0, 1.0, 1000.0, 1000.0] blocks = find_blocks(s) print("blocks:") print(blocks) def test_find_blocks2(): s = [250.0, 250.0, 250.0, 1.0, 1.0, 1000.0, 1000.0] blocks = find_blocks2(s) print("blocks:") print(blocks) def test_find_blocks2_2(): s = [100,100,100,100,100,100,100,100] blocks = find_blocks2(s) print("blocks:") print(blocks) def edf2hdf2(fn, outfn='', hdf_dir='', anonymize=False): """ convert an edf file to hdf5 using fairly straightforward mapping return True if successful @database_sourcel_label tells us which database it came from LPCH_NK or STANFORD_NK this is important! """ if not outfn: base = os.path.basename(fn) base, ext = os.path.splitext(base) base = base + '.eeghdf' outfn = os.path.join(hdf_dir, base) # print('outfn:', outfn) # all the data point related stuff with edflib.EdfReader(fn) as ef: # read all EDF+ header information in just the way I want it header = { 'file_name': os.path.basename(fn), 'filetype': ef.filetype, 'patient_name': ef.patient_name, 'patientcode': ef.patientcode, 'gender': ef.gender, 'signals_in_file': ef.signals_in_file, 'datarecords_in_file': ef.datarecords_in_file, 'file_duration_100ns': ef.file_duration_100ns, 'file_duration_seconds': ef.file_duration_seconds, 'startdate_date': datetime.date(ef.startdate_year, ef.startdate_month, ef.startdate_day), 'start_datetime': datetime.datetime(ef.startdate_year, ef.startdate_month, ef.startdate_day, ef.starttime_hour, ef.starttime_minute, ef.starttime_second), 'starttime_subsecond_offset': ef.starttime_subsecond, 'birthdate_date': ef.birthdate_date, 'patient_additional': ef.patient_additional, 'admincode': ef.admincode, # usually the study eg. 
C13-100 'technician': ef.technician, 'equipment': ef.equipment, 'recording_additional': ef.recording_additional, 'datarecord_duration_100ns': ef.datarecord_duration_100ns, } pprint.pprint(header) #### validation code ##### validator = None # if source_database_label=='LPCH_NK': # validator = ValidateTrackHeaderLPCH(header=header) # elif source_database_label== 'STANFORD_NK': # validator = ValidateTrackHeaderStanford(header=header) # else: # raise ValidationError # if not validator.is_valid(): # print('problem with this file:', fn) # print(validator.errors,validator.error_code, # validator.error_params) # return False, validator # else: # print('\nvalid header::') # pprint.pprint(validator.cleaned_data) # header = validator.cleaned_data # from here on the header is valid and cleaned # use arrow start_datetime = header['start_datetime'] # end_date_time = datetime.datetime(ef.enddate_year, ef.enddate_month, ef.enddate_day, ef.endtime_hour, # ef.endtime_minute, ef.endtime_second) # tz naive # end_date_time - start_date_time duration = datetime.timedelta(seconds=header['file_duration_seconds']) # derived information birthdate = header['birthdate_date'] if birthdate: age = arrow.get(start_datetime) - arrow.get(header['birthdate_date']) debug('predicted age: %s' % age) # total_seconds() returns a float debug('predicted age (seconds): %s' % age.total_seconds()) else: age = datetime.timedelta(seconds=0) # if anonymize: # if source_database_label== 'LPCH_NK': # anonymizer = AnonymizeTrackHeaderLPCH(header, source_database_label=source_database_label) # if source_database_label == 'STANFORD_NK': # anonymizer = AnonymizeTrackHeaderStanford(header, source_database_label=source_database_label) # header = anonymizer.anonymous_header # replace the original header with the anonymous one # print('anonymized header') # pprint.pprint(header) # anonymized version if necessary header['end_datetime'] = header['start_datetime'] + duration ############# signal array information 
################## # signal block related stuff nsigs = ef.signals_in_file # again know/assume that this is uniform sampling across signals fs0 = ef.samplefrequency(0) signal_frequency_array = ef.get_signal_freqs() dfs = np.diff(signal_frequency_array) dfs_ind = np.where(dfs != 0.0) dfs_ind = dfs_ind[0] last_ind = 0 for dd in dfs_ind+1: print("block:",signal_frequency_array[last_ind:dd]) last_ind = dd print("last block:", signal_frequency_array[last_ind:]) print("where does sampling rate change?", np.where(dfs != 0.0)) print("elements:", signal_frequency_array[np.where(dfs != 0.0)]) print("signal_frequency_array::\n", repr(signal_frequency_array)) print("len(signal_frequency_array):", len(signal_frequency_array)) assert all(signal_frequency_array[:-3] == fs0) nsamples0 = ef.samples_in_file(0) # samples per channel print('nsigs=%s, fs0=%s, nsamples0=%s\n' % (nsigs, fs0, nsamples0)) num_samples_per_signal = ef.get_samples_per_signal() # np array print("num_samples_per_signal::\n", repr(num_samples_per_signal), '\n') # assert all(num_samples_per_signal == nsamples0) file_duration_sec = ef.file_duration_seconds #print("file_duration_sec", repr(file_duration_sec)) # Note that all annotations except the top row must also specify a duration. 
# long long onset; /* onset time of the event, expressed in units of 100 # nanoSeconds and relative to the starttime in the header */ # char duration[16]; /* duration time, this is a null-terminated ASCII text-string */ # char annotation[EDFLIB_MAX_ANNOTATION_LEN + 1]; /* description of the # event in UTF-8, this is a null term string of max length 512*/ # start("x.y"), end, char[20] # annotations = ef.read_annotations_as_array() # get numpy array of # annotations annotations_b = ef.read_annotations_b_100ns_units() # print("annotations_b::\n") # pprint.pprint(annotations_b) # get list of annotations signal_text_labels = ef.get_signal_text_labels() print("signal_text_labels::\n") pprint.pprint(signal_text_labels) print("normalized text labels::\n") signal_text_labels_lpch_normalized = [ normalize_lpch_signal_label(label) for label in signal_text_labels] pprint.pprint(signal_text_labels_lpch_normalized) # ef.recording_additional # print() signal_digital_mins = np.array( [ef.digital_min(ch) for ch in range(nsigs)]) signal_digital_total_min = min(signal_digital_mins) print("digital mins:", repr(signal_digital_mins)) print("digital total min:", repr(signal_digital_total_min)) signal_digital_maxs = np.array( [ef.digital_max(ch) for ch in range(nsigs)]) signal_digital_total_max = max(signal_digital_maxs) print("digital maxs:", repr(signal_digital_maxs)) #print("digital total max:", repr(signal_digital_total_max)) signal_physical_dims = [ ef.physical_dimension(ch) for ch in range(nsigs)] # print('signal_physical_dims::\n') # pprint.pprint(signal_physical_dims) #print() signal_physical_maxs = np.array( [ef.physical_max(ch) for ch in range(nsigs)]) #print('signal_physical_maxs::\n', repr(signal_physical_maxs)) signal_physical_mins = np.array( [ef.physical_min(ch) for ch in range(nsigs)]) #print('signal_physical_mins::\n', repr(signal_physical_mins)) # this don't seem to be used much so I will put at end signal_prefilters = [ef.prefilter(ch).strip() for ch in range(nsigs)] 
#print('signal_prefilters::\n') # pprint.pprint(signal_prefilters) #print() signal_transducers = [ef.transducer(ch).strip() for ch in range(nsigs)] #print('signal_transducers::\n') #pprint.pprint(signal_transducers) with eeghdf.EEGHDFWriter(outfn, 'w') as eegf: eegf.write_patient_info(patient_name=header['patient_name'], patientcode=header['patientcode'], gender=header['gender'], birthdate_isostring=header['birthdate_date'], # gestational_age_at_birth_days # born_premature patient_additional=header['patient_additional']) signal_text_labels_lpch_normalized = [ normalize_lpch_signal_label(label) for label in signal_text_labels] rec = eegf.create_record_block(record_duration_seconds=header['file_duration_seconds'], start_isodatetime=str(header['start_datetime']), end_isodatetime=str(header['end_datetime']), number_channels=header['signals_in_file'], num_samples_per_channel=nsamples0, sample_frequency=fs0, signal_labels=signal_text_labels_lpch_normalized, signal_physical_mins=signal_physical_mins, signal_physical_maxs=signal_physical_maxs, signal_digital_mins=signal_digital_mins, signal_digital_maxs=signal_digital_maxs, physical_dimensions=signal_physical_dims, patient_age_days=age.total_seconds() / 86400.0, signal_prefilters=signal_prefilters, signal_transducers=signal_transducers, technician=header['technician']) eegf.write_annotations_b(annotations_b) # may be should be called record annotations edfblock_itr = edf_block_iter_generator( ef, nsamples0, 100 * ef.samples_in_datarecord(0)*header['signals_in_file'], # samples_per_chunk roughly 100 datarecords at a time dtype='int32') signals = eegf.stream_dig_signal_to_record_block(rec, edfblock_itr) return True, validator # we succeeded def test_edf2hdf_info(): # on chris's macbook EDF_DIR = r'/Users/clee/code/eegml/nk_database_proj/private/lpch_edfs' fn = os.path.join(EDF_DIR, 'XA2731AX_1-1+.edf') edf2hdf(filename) if __name__ == '__main__': import sys if len(sys.argv) == 2: file_name = sys.argv[1] edf2hdf2(file_name)
bsd-3-clause
4,465,477,519,905,432,000
38.825348
170
0.561707
false
3.670513
false
false
false
lavish205/olympia
src/olympia/reviewers/tests/test_views.py
1
210461
# -*- coding: utf-8 -*- import json import os import time import urlparse from collections import OrderedDict from datetime import datetime, timedelta from django.conf import settings from django.core import mail from django.core.cache import cache from django.core.files import temp from django.core.files.base import File as DjangoFile from django.template import defaultfilters from django.test.utils import override_settings import mock from freezegun import freeze_time from lxml.html import HTMLParser, fromstring from mock import Mock, patch from pyquery import PyQuery as pq from olympia import amo, core, ratings from olympia.abuse.models import AbuseReport from olympia.access.models import Group, GroupUser from olympia.accounts.views import API_TOKEN_COOKIE from olympia.activity.models import ActivityLog from olympia.addons.models import ( Addon, AddonApprovalsCounter, AddonDependency, AddonReviewerFlags, AddonUser) from olympia.amo.templatetags.jinja_helpers import ( user_media_path, user_media_url) from olympia.amo.tests import ( APITestClient, TestCase, addon_factory, check_links, file_factory, formset, initial, reverse_ns, user_factory, version_factory) from olympia.amo.urlresolvers import reverse from olympia.files.models import File, FileValidation, WebextPermission from olympia.ratings.models import Rating, RatingFlag from olympia.reviewers.models import ( AutoApprovalSummary, RereviewQueueTheme, ReviewerScore, ReviewerSubscription, Whiteboard) from olympia.users.models import UserProfile from olympia.versions.models import ApplicationsVersions, AppVersion from olympia.zadmin.models import get_config class TestRedirectsOldPaths(TestCase): def setUp(self): user = user_factory() self.client.login(email=user.email) def test_redirect_old_queue(self): response = self.client.get('/en-US/editors/queue/new') self.assert3xx(response, '/reviewers/queue/new', status_code=301) def test_redirect_old_review_page(self): response = 
self.client.get('/en-US/editors/review/foobar') self.assert3xx(response, '/reviewers/review/foobar', status_code=301) class ReviewerTest(TestCase): fixtures = ['base/users', 'base/approvals'] def login_as_admin(self): assert self.client.login(email='[email protected]') def login_as_reviewer(self): assert self.client.login(email='[email protected]') def make_review(self, username='a'): u = UserProfile.objects.create(username=username) a = Addon.objects.create(name='yermom', type=amo.ADDON_EXTENSION) return Rating.objects.create(user=u, addon=a, body='baa') class TestRatingsModerationLog(ReviewerTest): def setUp(self): super(TestRatingsModerationLog, self).setUp() user = user_factory() self.grant_permission(user, 'Ratings:Moderate') self.client.login(email=user.email) self.url = reverse('reviewers.ratings_moderation_log') core.set_user(user) def test_log(self): response = self.client.get(self.url) assert response.status_code == 200 def test_start_filter(self): response = self.client.get(self.url, {'start': '2011-01-01'}) assert response.status_code == 200 def test_enddate_filter(self): """ Make sure that if our end date is 1/1/2011, that we include items from 1/1/2011. To not do as such would be dishonorable. """ review = self.make_review(username='b') ActivityLog.create( amo.LOG.APPROVE_RATING, review, review.addon).update( created=datetime(2011, 1, 1)) response = self.client.get(self.url, {'end': '2011-01-01'}) assert response.status_code == 200 assert pq(response.content)('tbody td').eq(0).text() == ( 'Jan. 1, 2011, midnight') def test_action_filter(self): """ Based on setup we should see only two items if we filter for deleted reviews. 
""" review = self.make_review() for i in xrange(2): ActivityLog.create(amo.LOG.APPROVE_RATING, review, review.addon) ActivityLog.create(amo.LOG.DELETE_RATING, review.id, review.addon) response = self.client.get(self.url, {'filter': 'deleted'}) assert response.status_code == 200 assert pq(response.content)('tbody tr').length == 2 def test_no_results(self): response = self.client.get(self.url, {'end': '2004-01-01'}) assert response.status_code == 200 assert '"no-results"' in response.content def test_moderation_log_detail(self): review = self.make_review() ActivityLog.create(amo.LOG.APPROVE_RATING, review, review.addon) id_ = ActivityLog.objects.moderation_events()[0].id response = self.client.get( reverse('reviewers.ratings_moderation_log.detail', args=[id_])) assert response.status_code == 200 class TestReviewLog(ReviewerTest): fixtures = ReviewerTest.fixtures + ['base/addon_3615'] def setUp(self): super(TestReviewLog, self).setUp() self.user = UserProfile.objects.get(email='[email protected]') self.login_as_reviewer() self.url = reverse('reviewers.reviewlog') def get_user(self): return UserProfile.objects.all()[0] def make_approvals(self): for addon in Addon.objects.all(): ActivityLog.create( amo.LOG.REJECT_VERSION, addon, addon.current_version, user=self.get_user(), details={'comments': 'youwin'}) def make_an_approval(self, action, comment='youwin', username=None, addon=None): if username: user = UserProfile.objects.get(username=username) else: user = self.get_user() if not addon: addon = Addon.objects.all()[0] ActivityLog.create(action, addon, addon.current_version, user=user, details={'comments': comment}) def test_basic(self): self.make_approvals() response = self.client.get(self.url) assert response .status_code == 200 doc = pq(response .content) assert doc('#log-filter button'), 'No filters.' # Should have 2 showing. 
rows = doc('tbody tr') assert rows.filter(':not(.hide)').length == 2 assert rows.filter('.hide').eq(0).text() == 'youwin' # Should have none showing if the addons are unlisted. for addon in Addon.objects.all(): self.make_addon_unlisted(addon) response = self.client.get(self.url) assert response .status_code == 200 doc = pq(response.content) assert not doc('tbody tr :not(.hide)') # But they should have 2 showing for someone with the right perms. self.grant_permission(self.user, 'Addons:ReviewUnlisted') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) rows = doc('tbody tr') assert rows.filter(':not(.hide)').length == 2 assert rows.filter('.hide').eq(0).text() == 'youwin' def test_xss(self): a = Addon.objects.all()[0] a.name = '<script>alert("xss")</script>' a.save() ActivityLog.create(amo.LOG.REJECT_VERSION, a, a.current_version, user=self.get_user(), details={'comments': 'xss!'}) response = self.client.get(self.url) assert response.status_code == 200 inner_html = pq(response.content)('#log-listing tbody td').eq(1).html() assert '&lt;script&gt;' in inner_html assert '<script>' not in inner_html def test_end_filter(self): """ Let's use today as an end-day filter and make sure we see stuff if we filter. """ self.make_approvals() # Make sure we show the stuff we just made. date = time.strftime('%Y-%m-%d') response = self.client.get(self.url, {'end': date}) assert response.status_code == 200 doc = pq(response.content)('#log-listing tbody') assert doc('tr:not(.hide)').length == 2 assert doc('tr.hide').eq(0).text() == 'youwin' def test_end_filter_wrong(self): """ Let's use today as an end-day filter and make sure we see stuff if we filter. """ self.make_approvals() response = self.client.get(self.url, {'end': 'wrong!'}) # If this is broken, we'll get a traceback. 
assert response.status_code == 200 assert pq(response.content)('#log-listing tr:not(.hide)').length == 3 def test_start_filter(self): with freeze_time('2017-08-01 10:00'): self.make_approvals() # Make sure we show the stuff we just made. response = self.client.get(self.url, {'start': '2017-07-31'}) assert response.status_code == 200 doc = pq(response.content)('#log-listing tbody') assert doc('tr:not(.hide)').length == 2 assert doc('tr.hide').eq(0).text() == 'youwin' def test_start_default_filter(self): with freeze_time('2017-07-31 10:00'): self.make_approvals() with freeze_time('2017-08-01 10:00'): addon = Addon.objects.first() ActivityLog.create( amo.LOG.REJECT_VERSION, addon, addon.current_version, user=self.get_user(), details={'comments': 'youwin'}) # Make sure the default 'start' to the 1st of a month works properly with freeze_time('2017-08-03 11:00'): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#log-listing tbody') assert doc('tr:not(.hide)').length == 1 assert doc('tr.hide').eq(0).text() == 'youwin' def test_search_comment_exists(self): """Search by comment.""" self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE, comment='hello') response = self.client.get(self.url, {'search': 'hello'}) assert response.status_code == 200 assert pq(response.content)( '#log-listing tbody tr.hide').eq(0).text() == 'hello' def test_search_comment_case_exists(self): """Search by comment, with case.""" self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE, comment='hello') response = self.client.get(self.url, {'search': 'HeLlO'}) assert response.status_code == 200 assert pq(response.content)( '#log-listing tbody tr.hide').eq(0).text() == 'hello' def test_search_comment_doesnt_exist(self): """Search by comment, with no results.""" self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE, comment='hello') response = self.client.get(self.url, {'search': 'bye'}) assert response.status_code == 200 assert 
pq(response.content)('.no-results').length == 1 def test_search_author_exists(self): """Search by author.""" self.make_approvals() self.make_an_approval( amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer', comment='hi') response = self.client.get(self.url, {'search': 'reviewer'}) assert response.status_code == 200 rows = pq(response.content)('#log-listing tbody tr') assert rows.filter(':not(.hide)').length == 1 assert rows.filter('.hide').eq(0).text() == 'hi' def test_search_author_case_exists(self): """Search by author, with case.""" self.make_approvals() self.make_an_approval( amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer', comment='hi') response = self.client.get(self.url, {'search': 'ReviEwEr'}) assert response.status_code == 200 rows = pq(response.content)('#log-listing tbody tr') assert rows.filter(':not(.hide)').length == 1 assert rows.filter('.hide').eq(0).text() == 'hi' def test_search_author_doesnt_exist(self): """Search by author, with no results.""" self.make_approvals() self.make_an_approval( amo.LOG.REQUEST_ADMIN_REVIEW_CODE, username='reviewer') response = self.client.get(self.url, {'search': 'wrong'}) assert response.status_code == 200 assert pq(response.content)('.no-results').length == 1 def test_search_addon_exists(self): """Search by add-on name.""" self.make_approvals() addon = Addon.objects.all()[0] response = self.client.get(self.url, {'search': addon.name}) assert response.status_code == 200 tr = pq(response.content)( '#log-listing tr[data-addonid="%s"]' % addon.id) assert tr.length == 1 assert tr.siblings('.comments').text() == 'youwin' def test_search_addon_case_exists(self): """Search by add-on name, with case.""" self.make_approvals() addon = Addon.objects.all()[0] response = self.client.get( self.url, {'search': str(addon.name).swapcase()}) assert response.status_code == 200 tr = pq(response.content)( '#log-listing tr[data-addonid="%s"]' % addon.id) assert tr.length == 1 assert tr.siblings('.comments').text() == 'youwin' def 
test_search_addon_doesnt_exist(self): """Search by add-on name, with no results.""" self.make_approvals() response = self.client.get(self.url, {'search': 'xxx'}) assert response.status_code == 200 assert pq(response.content)('.no-results').length == 1 @patch('olympia.activity.models.ActivityLog.arguments', new=Mock) def test_addon_missing(self): self.make_approvals() response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#log-listing tr td').eq(1).text() == ( 'Add-on has been deleted.') def test_request_info_logs(self): self.make_an_approval(amo.LOG.REQUEST_INFORMATION) response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#log-listing tr td a').eq(1).text() == ( 'More information requested') def test_super_review_logs(self): self.make_an_approval(amo.LOG.REQUEST_ADMIN_REVIEW_CODE) response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#log-listing tr td a').eq(1).text() == ( 'Admin add-on-review requested') def test_comment_logs(self): self.make_an_approval(amo.LOG.COMMENT_VERSION) response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#log-listing tr td a').eq(1).text() == ( 'Commented') def test_content_approval(self): self.make_an_approval(amo.LOG.APPROVE_CONTENT) response = self.client.get(self.url) assert response.status_code == 200 link = pq(response.content)('#log-listing tbody td a').eq(1)[0] assert link.attrib['href'] == '/en-US/reviewers/review-content/a3615' assert link.text_content().strip() == 'Content approved' def test_content_rejection(self): self.make_an_approval(amo.LOG.REJECT_CONTENT) response = self.client.get(self.url) assert response.status_code == 200 link = pq(response.content)('#log-listing tbody td a').eq(1)[0] assert link.attrib['href'] == '/en-US/reviewers/review-content/a3615' assert link.text_content().strip() == 'Content rejected' 
@freeze_time('2017-08-03') def test_review_url(self): self.login_as_admin() addon = addon_factory() unlisted_version = version_factory( addon=addon, channel=amo.RELEASE_CHANNEL_UNLISTED) ActivityLog.create( amo.LOG.APPROVE_VERSION, addon, addon.current_version, user=self.get_user(), details={'comments': 'foo'}) response = self.client.get(self.url) assert response.status_code == 200 url = reverse('reviewers.review', args=[addon.slug]) link = pq(response.content)( '#log-listing tbody tr[data-addonid] a').eq(1) assert link.attr('href') == url entry = ActivityLog.create( amo.LOG.APPROVE_VERSION, addon, unlisted_version, user=self.get_user(), details={'comments': 'foo'}) # Force the latest entry to be at the top of the list so that we can # pick it more reliably later from the HTML entry.update(created=datetime.now() + timedelta(days=1)) response = self.client.get(self.url) url = reverse( 'reviewers.review', args=['unlisted', addon.slug]) assert pq(response.content)( '#log-listing tr td a').eq(1).attr('href') == url class TestDashboard(TestCase): def setUp(self): self.url = reverse('reviewers.dashboard') self.user = user_factory() self.client.login(email=self.user.email) def test_old_temporary_url_redirect(self): response = self.client.get('/en-US/reviewers/dashboard') self.assert3xx( response, reverse('reviewers.dashboard'), status_code=301) def test_not_a_reviewer(self): response = self.client.get(self.url) assert response.status_code == 403 def test_admin_all_permissions(self): # Create a lot of add-ons to test the queue counts. # Nominated and pending. 
addon_factory( status=amo.STATUS_NOMINATED, type=amo.ADDON_STATICTHEME, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) under_admin_review = addon_factory( status=amo.STATUS_NOMINATED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) AddonReviewerFlags.objects.create( addon=under_admin_review, needs_admin_code_review=True) under_admin_review_and_pending = addon_factory() AddonReviewerFlags.objects.create( addon=under_admin_review_and_pending, needs_admin_theme_review=True) version_factory( addon=under_admin_review_and_pending, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # Auto-approved and Content Review. addon1 = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=addon1) AutoApprovalSummary.objects.create( version=addon1.current_version, verdict=amo.AUTO_APPROVED) under_content_review = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=under_content_review) AutoApprovalSummary.objects.create( version=under_content_review.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=under_content_review, needs_admin_content_review=True) addon2 = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=addon2) AutoApprovalSummary.objects.create( version=addon2.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=addon2, needs_admin_content_review=True) under_code_review = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=under_code_review) AutoApprovalSummary.objects.create( version=under_code_review.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=under_code_review, needs_admin_code_review=True) admins_group = 
Group.objects.create(name='Admins', rules='*:*') GroupUser.objects.create(user=self.user, group=admins_group) # Addon with expired info request expired = addon_factory(name=u'Expired') AddonReviewerFlags.objects.create( addon=expired, pending_info_request=self.days_ago(42)) # Rating rating = Rating.objects.create( addon=addon1, version=addon1.current_version, user=self.user, flag=True, body=u'This âdd-on sucks!!111', rating=1, editorreview=True) rating.ratingflag_set.create() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 8 # All sections are present. expected_links = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', reverse('reviewers.queue_auto_approved'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.queue_content_review'), reverse('reviewers.performance'), reverse('reviewers.themes.list'), reverse('reviewers.themes.list_rereview'), reverse('reviewers.themes.list_flagged'), reverse('reviewers.themes.logs'), reverse('reviewers.themes.deleted'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', reverse('reviewers.queue_moderated'), reverse('reviewers.ratings_moderation_log'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation', reverse('reviewers.unlisted_queue_all'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.motd'), reverse('reviewers.queue_expired_info_requests'), ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'New (2)' assert doc('.dashboard a')[1].text == 'Updates (3)' assert doc('.dashboard a')[6].text == 'Auto Approved Add-ons (4)' assert 
doc('.dashboard a')[10].text == 'Content Review (4)' assert (doc('.dashboard a')[18].text == 'Ratings Awaiting Moderation (1)') assert (doc('.dashboard a')[24].text == 'Expired Information Requests (1)') def test_can_see_all_through_reviewer_view_all_permission(self): self.grant_permission(self.user, 'ReviewerTools:View') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 8 # All sections are present. expected_links = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', reverse('reviewers.queue_auto_approved'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.queue_content_review'), reverse('reviewers.performance'), reverse('reviewers.themes.list'), reverse('reviewers.themes.list_rereview'), reverse('reviewers.themes.list_flagged'), reverse('reviewers.themes.logs'), reverse('reviewers.themes.deleted'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', reverse('reviewers.queue_moderated'), reverse('reviewers.ratings_moderation_log'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation', reverse('reviewers.unlisted_queue_all'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.motd'), reverse('reviewers.queue_expired_info_requests'), ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links def test_legacy_reviewer(self): # Create some add-ons to test the queue counts. 
addon_factory( status=amo.STATUS_NOMINATED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # These two are under admin review and will be ignored. under_admin_review = addon_factory( status=amo.STATUS_NOMINATED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) AddonReviewerFlags.objects.create( addon=under_admin_review, needs_admin_code_review=True) under_admin_review_and_pending = addon_factory() AddonReviewerFlags.objects.create( addon=under_admin_review_and_pending, needs_admin_code_review=True) version_factory( addon=under_admin_review_and_pending, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # This is a static theme so won't be shown addon_factory( status=amo.STATUS_NOMINATED, type=amo.ADDON_STATICTHEME, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # Grant user the permission to see only the legacy add-ons section. self.grant_permission(self.user, 'Addons:Review') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'New (1)' assert doc('.dashboard a')[1].text == 'Updates (2)' def test_post_reviewer(self): # Create an add-on to test the queue count. It's under admin content # review but that does not have an impact. 
addon = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=addon) AutoApprovalSummary.objects.create( version=addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=addon, needs_admin_content_review=True) # This one however is under admin code review, it's ignored. under_code_review = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=under_code_review) AutoApprovalSummary.objects.create( version=under_code_review.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=under_code_review, needs_admin_code_review=True) # Grant user the permission to see only the Auto Approved section. self.grant_permission(self.user, 'Addons:PostReview') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.queue_auto_approved'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'Auto Approved Add-ons (1)' def test_content_reviewer(self): # Create an add-on to test the queue count. It's under admin code # review but that does not have an impact. addon = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=addon) AutoApprovalSummary.objects.create( version=addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=addon, needs_admin_code_review=True) # This one is under admin *content* review so it's ignored. 
under_content_review = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=under_content_review) AutoApprovalSummary.objects.create( version=under_content_review.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=under_content_review, needs_admin_content_review=True) # Grant user the permission to see only the Content Review section. self.grant_permission(self.user, 'Addons:ContentReview') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.queue_content_review'), reverse('reviewers.performance'), ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'Content Review (1)' def test_themes_reviewer(self): # Create some themes to test the queue counts. addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PENDING) addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PENDING) addon = addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_PUBLIC) RereviewQueueTheme.objects.create(theme=addon.persona) addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING) addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING) addon_factory(type=amo.ADDON_PERSONA, status=amo.STATUS_REVIEW_PENDING) # Grant user the permission to see only the themes section. self.grant_permission(self.user, 'Personas:Review') # Test. 
response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.themes.list'), reverse('reviewers.themes.list_rereview'), reverse('reviewers.themes.list_flagged'), reverse('reviewers.themes.logs'), reverse('reviewers.themes.deleted'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'New Themes (2)' assert doc('.dashboard a')[1].text == 'Themes Updates (1)' assert doc('.dashboard a')[2].text == 'Flagged Themes (3)' def test_ratings_moderator(self): # Create an rating to test the queue count. addon = addon_factory() user = user_factory() rating = Rating.objects.create( addon=addon, version=addon.current_version, user=user, flag=True, body=u'This âdd-on sucks!!111', rating=1, editorreview=True) rating.ratingflag_set.create() # Grant user the permission to see only the ratings to review section. self.grant_permission(self.user, 'Ratings:Moderate') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.queue_moderated'), reverse('reviewers.ratings_moderation_log'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'Ratings Awaiting Moderation (1)' def test_unlisted_reviewer(self): # Grant user the permission to see only the unlisted add-ons section. self.grant_permission(self.user, 'Addons:ReviewUnlisted') # Test. 
response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.unlisted_queue_all'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links def test_static_theme_reviewer(self): # Create some static themes to test the queue counts. addon_factory( status=amo.STATUS_NOMINATED, type=amo.ADDON_STATICTHEME, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(type=amo.ADDON_STATICTHEME), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) version_factory( addon=addon_factory(type=amo.ADDON_STATICTHEME,), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # These two are under admin review and will be ignored. under_admin_review = addon_factory( status=amo.STATUS_NOMINATED, type=amo.ADDON_STATICTHEME, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) AddonReviewerFlags.objects.create( addon=under_admin_review, needs_admin_theme_review=True) under_admin_review_and_pending = addon_factory( type=amo.ADDON_STATICTHEME) AddonReviewerFlags.objects.create( addon=under_admin_review_and_pending, needs_admin_theme_review=True) version_factory( addon=under_admin_review_and_pending, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # This is an extension so won't be shown addon_factory( status=amo.STATUS_NOMINATED, type=amo.ADDON_EXTENSION, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) # Grant user the permission to see only the legacy add-ons section. self.grant_permission(self.user, 'Addons:ThemeReview') # Test. 
response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 1 expected_links = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Themes/Guidelines', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'New (1)' assert doc('.dashboard a')[1].text == 'Updates (2)' def test_post_reviewer_and_content_reviewer(self): # Create add-ons to test the queue count. The first add-on has its # content approved, so the post review queue should contain 2 add-ons, # and the content review queue only 1. addon = addon_factory( version_kw={'is_webextension': True}) AutoApprovalSummary.objects.create( version=addon.current_version, verdict=amo.AUTO_APPROVED) AddonApprovalsCounter.approve_content_for_addon(addon=addon) addon = addon_factory( version_kw={'is_webextension': True}) AddonApprovalsCounter.reset_for_addon(addon=addon) AutoApprovalSummary.objects.create( version=addon.current_version, verdict=amo.AUTO_APPROVED) # Grant user the permission to see both the Content Review and the # Auto Approved Add-ons sections. self.grant_permission(self.user, 'Addons:ContentReview') self.grant_permission(self.user, 'Addons:PostReview') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 2 # 2 sections are shown. 
expected_links = [ reverse('reviewers.queue_auto_approved'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.queue_content_review'), reverse('reviewers.performance'), ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'Auto Approved Add-ons (2)' assert 'target' not in doc('.dashboard a')[0].attrib assert doc('.dashboard a')[3].text == 'Review Guide' assert doc('.dashboard a')[3].attrib['target'] == '_blank' assert doc('.dashboard a')[3].attrib['rel'] == 'noopener noreferrer' assert doc('.dashboard a')[4].text == 'Content Review (1)' def test_legacy_reviewer_and_ratings_moderator(self): # Grant user the permission to see both the legacy add-ons and the # ratings moderation sections. self.grant_permission(self.user, 'Addons:Review') self.grant_permission(self.user, 'Ratings:Moderate') # Test. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.dashboard h3')) == 2 expected_links = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), reverse('reviewers.performance'), reverse('reviewers.reviewlog'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide', reverse('reviewers.queue_moderated'), reverse('reviewers.ratings_moderation_log'), 'https://wiki.mozilla.org/Add-ons/Reviewers/Guide/Moderation', ] links = [link.attrib['href'] for link in doc('.dashboard a')] assert links == expected_links assert doc('.dashboard a')[0].text == 'New (0)' assert 'target' not in doc('.dashboard a')[0].attrib assert doc('.dashboard a')[1].text == 'Updates (0)' assert doc('.dashboard a')[5].text == 'Ratings Awaiting Moderation (0)' assert 'target' not in doc('.dashboard a')[6].attrib assert doc('.dashboard a')[7].text == 'Moderation Guide' assert doc('.dashboard a')[7].attrib['target'] == '_blank' assert doc('.dashboard a')[7].attrib['rel'] 
== 'noopener noreferrer'


class QueueTest(ReviewerTest):
    # Base class shared by all review-queue tests below. Subclasses set
    # `self.expected_addons` / `self.url` and call the `_test_*` helpers.
    fixtures = ['base/users']
    # `listed` toggles which channel the whole test class exercises;
    # unlisted subclasses flip it to False.
    listed = True

    def setUp(self):
        super(QueueTest, self).setUp()
        self.user = UserProfile.objects.get(email='[email protected]')
        self.login_as_reviewer()
        if self.listed is False:
            # Testing unlisted views: needs Addons:ReviewUnlisted perm.
            self.grant_permission(self.user, 'Addons:ReviewUnlisted')
        self.url = reverse('reviewers.queue_pending')
        self.addons = OrderedDict()
        self.expected_addons = []
        self.channel_name = 'listed' if self.listed else 'unlisted'

    def generate_files(self, subset=None, files=None):
        """Create the standard set of test add-ons (or `files` if given).

        Only names in `subset` are created when it is non-empty. Returns an
        OrderedDict mapping name -> created add-on; also accumulated on
        `self.addons`.
        """
        if subset is None:
            subset = []
        # Default fixture set: two pending updates, two new nominations,
        # and one fully public add-on that should never appear in queues.
        files = files or OrderedDict([
            ('Pending One', {
                'version_str': '0.1',
                'addon_status': amo.STATUS_PUBLIC,
                'file_status': amo.STATUS_AWAITING_REVIEW,
            }),
            ('Pending Two', {
                'version_str': '0.1',
                'addon_status': amo.STATUS_PUBLIC,
                'file_status': amo.STATUS_AWAITING_REVIEW,
            }),
            ('Nominated One', {
                'version_str': '0.1',
                'addon_status': amo.STATUS_NOMINATED,
                'file_status': amo.STATUS_AWAITING_REVIEW,
            }),
            ('Nominated Two', {
                'version_str': '0.1',
                'addon_status': amo.STATUS_NOMINATED,
                'file_status': amo.STATUS_AWAITING_REVIEW,
            }),
            ('Public', {
                'version_str': '0.1',
                'addon_status': amo.STATUS_PUBLIC,
                'file_status': amo.STATUS_PUBLIC,
            }),
        ])
        results = OrderedDict()
        channel = (amo.RELEASE_CHANNEL_LISTED if self.listed
                   else amo.RELEASE_CHANNEL_UNLISTED)
        for name, attrs in files.iteritems():
            if not subset or name in subset:
                # Fold the per-entry shorthand keys (version_str,
                # file_status, addon_status) into the kwargs that
                # addon_factory() actually understands.
                version_kw = attrs.get('version_kw', {})
                version_kw.update(
                    {'channel': channel, 'version': attrs.pop('version_str')})
                attrs['version_kw'] = version_kw
                file_kw = attrs.get('file_kw', {})
                file_kw.update({'status': attrs.pop('file_status')})
                attrs['file_kw'] = file_kw
                results[name] = addon_factory(
                    status=attrs.pop('addon_status'), name=name, **attrs)
        self.addons.update(results)
        return results

    def generate_file(self, name):
        """Create a single named add-on from the standard fixture set."""
        return self.generate_files([name])[name]

    def get_review_data(self):
        # Format: (Created n days ago,
        #          percentages of [< 5, 5-10, >10])
        return ((1, (0, 0, 100)),
                (8, (0, 50, 50)),
                (12, (50, 0, 50)))

    def get_addon_latest_version(self, addon):
        """Return the add-on's latest version for the channel under test."""
        if self.listed:
            channel = amo.RELEASE_CHANNEL_LISTED
        else:
            channel = amo.RELEASE_CHANNEL_UNLISTED
        return addon.find_latest_version(channel=channel)

    def get_queue(self, addon):
        """Assert the add-on appears exactly once in its current queue."""
        version = self.get_addon_latest_version(addon)
        assert version.current_queue.objects.filter(id=addon.id).count() == 1

    def get_expected_addons_by_names(self, names):
        """Generate the standard fixtures and return those matching `names`.

        Results are in sorted-name order, which matches the queue display
        order assumed by `_test_results()`.
        """
        expected_addons = []
        files = self.generate_files()
        for name in sorted(names):
            if name in files:
                expected_addons.append(files[name])
        # Make sure all elements have been added
        assert len(expected_addons) == len(names)
        return expected_addons

    def _test_get_queue(self):
        for addon in self.expected_addons:
            self.get_queue(addon)

    def _test_queue_layout(self, name, tab_position, total_addons,
                           total_queues, per_page=None):
        """Assert the tab at `tab_position` shows `name` with its count."""
        args = {'per_page': per_page} if per_page else {}
        response = self.client.get(self.url, args)
        assert response.status_code == 200
        doc = pq(response.content)
        links = doc('.tabnav li a')
        link = links.eq(tab_position)

        assert links.length == total_queues
        assert link.text() == '%s (%s)' % (name, total_addons)
        assert link.attr('href') == self.url
        if per_page:
            assert doc('.data-grid-top .num-results').text() == (
                u'Results %s\u20131 of %s' % (per_page, total_addons))

    def _test_results(self):
        """Assert the queue lists exactly `self.expected_addons`, in order."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        expected = []
        if not len(self.expected_addons):
            raise AssertionError('self.expected_addons was an empty list')
        for idx, addon in enumerate(self.expected_addons):
            latest_version = self.get_addon_latest_version(addon)
            assert latest_version
            name = '%s %s' % (unicode(addon.name),
                              latest_version.version)
            if self.channel_name == 'listed':
                # We typically don't include the channel name if it's the
                # default one, 'listed'.
channel = [] else: channel = [self.channel_name] url = reverse('reviewers.review', args=channel + [addon.slug]) expected.append((name, url)) doc = pq(response.content) links = doc('#addon-queue tr.addon-row td a:not(.app-icon)') assert len(links) == len(self.expected_addons) check_links(expected, links, verify=False) return doc class TestQueueBasics(QueueTest): def test_only_viewable_by_reviewer(self): # Addon reviewer has access. response = self.client.get(self.url) assert response.status_code == 200 # Regular user doesn't have access. self.client.logout() assert self.client.login(email='[email protected]') response = self.client.get(self.url) assert response.status_code == 403 # Persona reviewer doesn't have access either. self.client.logout() assert self.client.login(email='[email protected]') response = self.client.get(self.url) assert response.status_code == 403 def test_invalid_page(self): response = self.client.get(self.url, {'page': 999}) assert response.status_code == 200 assert response.context['page'].number == 1 def test_invalid_per_page(self): response = self.client.get(self.url, {'per_page': '<garbage>'}) # No exceptions: assert response.status_code == 200 @patch.multiple('olympia.reviewers.views', REVIEWS_PER_PAGE_MAX=1, REVIEWS_PER_PAGE=1) def test_max_per_page(self): self.generate_files() response = self.client.get(self.url, {'per_page': '2'}) assert response.status_code == 200 doc = pq(response.content) assert doc('.data-grid-top .num-results').text() == ( u'Results 1\u20131 of 2') @patch('olympia.reviewers.views.REVIEWS_PER_PAGE', new=1) def test_reviews_per_page(self): self.generate_files() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('.data-grid-top .num-results').text() == ( u'Results 1\u20131 of 2') def test_grid_headers(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ 'Add-on', 'Type', 'Waiting Time', 
'Flags', ] assert [pq(th).text() for th in doc('#addon-queue tr th')[1:]] == ( expected) def test_grid_headers_sort_after_search(self): params = dict(searching=['True'], text_query=['abc'], addon_type_ids=['2'], sort=['addon_type_id']) response = self.client.get(self.url, params) assert response.status_code == 200 tr = pq(response.content)('#addon-queue tr') sorts = { # Column index => sort. 1: 'addon_name', # Add-on. 2: '-addon_type_id', # Type. 3: 'waiting_time_min', # Waiting Time. } for idx, sort in sorts.iteritems(): # Get column link. a = tr('th').eq(idx).find('a') # Update expected GET parameters with sort type. params.update(sort=[sort]) # Parse querystring of link to make sure `sort` type is correct. assert urlparse.parse_qs(a.attr('href').split('?')[1]) == params def test_no_results(self): response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('.queue-outer .no-results').length == 1 def test_no_paginator_when_on_single_page(self): response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('.pagination').length == 0 def test_paginator_when_many_pages(self): # 'Pending One' and 'Pending Two' should be the only add-ons in # the pending queue, but we'll generate them all for good measure. 
self.generate_files() response = self.client.get(self.url, {'per_page': 1}) assert response.status_code == 200 doc = pq(response.content) assert doc('.data-grid-top .num-results').text() == ( u'Results 1\u20131 of 2') assert doc('.data-grid-bottom .num-results').text() == ( u'Results 1\u20131 of 2') def test_legacy_queue_sort(self): sorts = ( ['age', 'Waiting Time'], ['name', 'Add-on'], ['type', 'Type'], ) for key, text in sorts: response = self.client.get(self.url, {'sort': key}) assert response.status_code == 200 assert pq(response.content)('th.ordered a').text() == text def test_flags_jetpack(self): addon = addon_factory( status=amo.STATUS_NOMINATED, name='Jetpack', version_kw={'version': '0.1'}, file_kw={'status': amo.STATUS_AWAITING_REVIEW, 'jetpack_version': 1.2}) r = self.client.get(reverse('reviewers.queue_nominated')) rows = pq(r.content)('#addon-queue tr.addon-row') assert rows.length == 1 assert rows.attr('data-addon') == str(addon.id) assert rows.find('td').eq(1).text() == 'Jetpack 0.1' assert rows.find('.ed-sprite-jetpack').length == 1 def test_flags_is_restart_required(self): addon = addon_factory( status=amo.STATUS_NOMINATED, name='Some Add-on', version_kw={'version': '0.1'}, file_kw={'status': amo.STATUS_AWAITING_REVIEW, 'is_restart_required': True}) r = self.client.get(reverse('reviewers.queue_nominated')) rows = pq(r.content)('#addon-queue tr.addon-row') assert rows.length == 1 assert rows.attr('data-addon') == str(addon.id) assert rows.find('td').eq(1).text() == 'Some Add-on 0.1' assert rows.find('.ed-sprite-jetpack').length == 0 assert rows.find('.ed-sprite-is_restart_required').length == 1 def test_flags_is_restart_required_false(self): addon = addon_factory( status=amo.STATUS_NOMINATED, name='Restartless', version_kw={'version': '0.1'}, file_kw={'status': amo.STATUS_AWAITING_REVIEW, 'is_restart_required': False}) r = self.client.get(reverse('reviewers.queue_nominated')) rows = pq(r.content)('#addon-queue tr.addon-row') assert rows.length == 1 
assert rows.attr('data-addon') == str(addon.id) assert rows.find('td').eq(1).text() == 'Restartless 0.1' assert rows.find('.ed-sprite-jetpack').length == 0 assert rows.find('.ed-sprite-is_restart_required').length == 0 def test_tabnav_permissions(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) links = doc('.tabnav li a').map(lambda i, e: e.attrib['href']) expected = [ reverse('reviewers.queue_nominated'), reverse('reviewers.queue_pending'), ] assert links == expected self.grant_permission(self.user, 'Ratings:Moderate') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) links = doc('.tabnav li a').map(lambda i, e: e.attrib['href']) expected.append(reverse('reviewers.queue_moderated')) assert links == expected self.grant_permission(self.user, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) links = doc('.tabnav li a').map(lambda i, e: e.attrib['href']) expected.append(reverse('reviewers.queue_auto_approved')) assert links == expected self.grant_permission(self.user, 'Addons:ContentReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) links = doc('.tabnav li a').map(lambda i, e: e.attrib['href']) expected.append(reverse('reviewers.queue_content_review')) assert links == expected self.grant_permission(self.user, 'Reviews:Admin') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) links = doc('.tabnav li a').map(lambda i, e: e.attrib['href']) expected.append(reverse('reviewers.queue_expired_info_requests')) assert links == expected class TestPendingQueue(QueueTest): def setUp(self): super(TestPendingQueue, self).setUp() # These should be the only ones present. 
self.expected_addons = self.get_expected_addons_by_names( ['Pending One', 'Pending Two']) self.url = reverse('reviewers.queue_pending') def test_results(self): self._test_results() def test_queue_layout(self): self._test_queue_layout('Updates', tab_position=1, total_addons=2, total_queues=2) def test_get_queue(self): self._test_get_queue() def test_webextensions_filtered_out_because_of_post_review(self): version = self.addons['Pending Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) # Webextensions are filtered out from the queue since auto_approve is # taking care of them. self.expected_addons = [self.addons['Pending One']] self._test_results() def test_webextension_with_auto_approval_disabled_false_filtered_out(self): version = self.addons['Pending Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) AddonReviewerFlags.objects.create( addon=self.addons['Pending Two'], auto_approval_disabled=False) self.expected_addons = [self.addons['Pending One']] self._test_results() def test_webextension_with_auto_approval_disabled_does_show_up(self): version = self.addons['Pending Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) version = self.addons['Pending Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) AddonReviewerFlags.objects.create( addon=self.addons['Pending One'], auto_approval_disabled=True) self.expected_addons = [self.addons['Pending One']] self._test_results() def test_static_theme_filtered_out(self): self.addons['Pending Two'].update(type=amo.ADDON_STATICTHEME) # Static Theme shouldn't be shown self.expected_addons = [self.addons['Pending One']] self._test_results() # Unless you have that permission also self.grant_permission(self.user, 'Addons:ThemeReview') self.expected_addons = [ self.addons['Pending One'], self.addons['Pending Two']] 
self._test_results() class TestStaticThemePendingQueue(QueueTest): def setUp(self): super(TestStaticThemePendingQueue, self).setUp() # These should be the only ones present. self.expected_addons = self.get_expected_addons_by_names( ['Pending One', 'Pending Two']) Addon.objects.all().update(type=amo.ADDON_STATICTHEME) self.url = reverse('reviewers.queue_pending') GroupUser.objects.filter(user=self.user).delete() self.grant_permission(self.user, 'Addons:ThemeReview') def test_results(self): self._test_results() def test_queue_layout(self): self._test_queue_layout('Updates', tab_position=1, total_addons=2, total_queues=2) def test_get_queue(self): self._test_get_queue() def test_extensions_filtered_out(self): self.addons['Pending Two'].update(type=amo.ADDON_EXTENSION) # Extensions shouldn't be shown self.expected_addons = [self.addons['Pending One']] self._test_results() # Unless you have that permission also self.grant_permission(self.user, 'Addons:Review') self.expected_addons = [ self.addons['Pending One'], self.addons['Pending Two']] self._test_results() class TestNominatedQueue(QueueTest): def setUp(self): super(TestNominatedQueue, self).setUp() # These should be the only ones present. self.expected_addons = self.get_expected_addons_by_names( ['Nominated One', 'Nominated Two']) self.url = reverse('reviewers.queue_nominated') def test_results(self): self._test_results() def test_results_two_versions(self): version1 = self.addons['Nominated One'].versions.all()[0] version2 = self.addons['Nominated Two'].versions.all()[0] file_ = version2.files.get() # Versions are ordered by creation date, so make sure they're set. past = self.days_ago(1) version2.update(created=past, nomination=past) # Create another version, v0.2, by "cloning" v0.1. version2.pk = None version2.version = '0.2' version2.save() # Reset creation date once it has been saved. 
future = datetime.now() - timedelta(seconds=1) version2.update(created=future, nomination=future) # Associate v0.2 it with a file. file_.pk = None file_.version = version2 file_.save() # disable old files like Version.from_upload() would. version2.disable_old_files() response = self.client.get(self.url) assert response.status_code == 200 expected = [ ('Nominated One 0.1', reverse('reviewers.review', args=[version1.addon.slug])), ('Nominated Two 0.2', reverse('reviewers.review', args=[version2.addon.slug])), ] doc = pq(response.content) check_links( expected, doc('#addon-queue tr.addon-row td a:not(.app-icon)'), verify=False) def test_queue_layout(self): self._test_queue_layout('New', tab_position=0, total_addons=2, total_queues=2) def test_get_queue(self): self._test_get_queue() def test_webextensions_filtered_out_because_of_post_review(self): version = self.addons['Nominated Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) # Webextensions are filtered out from the queue since auto_approve is # taking care of them. 
self.expected_addons = [self.addons['Nominated One']] self._test_results() def test_webextension_with_auto_approval_disabled_false_filtered_out(self): version = self.addons['Nominated Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) AddonReviewerFlags.objects.create( addon=self.addons['Nominated Two'], auto_approval_disabled=False) self.expected_addons = [self.addons['Nominated One']] self._test_results() def test_webextension_with_auto_approval_disabled_does_show_up(self): version = self.addons['Nominated Two'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) version = self.addons['Nominated One'].find_latest_version( channel=amo.RELEASE_CHANNEL_LISTED) version.files.update(is_webextension=True) AddonReviewerFlags.objects.create( addon=self.addons['Nominated One'], auto_approval_disabled=True) self.expected_addons = [self.addons['Nominated One']] self._test_results() def test_static_theme_filtered_out(self): self.addons['Nominated Two'].update(type=amo.ADDON_STATICTHEME) # Static Theme shouldn't be shown self.expected_addons = [self.addons['Nominated One']] self._test_results() # Unless you have that permission also self.grant_permission(self.user, 'Addons:ThemeReview') self.expected_addons = [ self.addons['Nominated One'], self.addons['Nominated Two']] self._test_results() class TestStaticThemeNominatedQueue(QueueTest): def setUp(self): super(TestStaticThemeNominatedQueue, self).setUp() # These should be the only ones present. 
self.expected_addons = self.get_expected_addons_by_names( ['Nominated One', 'Nominated Two']) self.url = reverse('reviewers.queue_nominated') Addon.objects.all().update(type=amo.ADDON_STATICTHEME) GroupUser.objects.filter(user=self.user).delete() self.grant_permission(self.user, 'Addons:ThemeReview') def test_results(self): self._test_results() def test_results_two_versions(self): version1 = self.addons['Nominated One'].versions.all()[0] version2 = self.addons['Nominated Two'].versions.all()[0] file_ = version2.files.get() # Versions are ordered by creation date, so make sure they're set. past = self.days_ago(1) version2.update(created=past, nomination=past) # Create another version, v0.2, by "cloning" v0.1. version2.pk = None version2.version = '0.2' version2.save() # Reset creation date once it has been saved. future = datetime.now() - timedelta(seconds=1) version2.update(created=future, nomination=future) # Associate v0.2 it with a file. file_.pk = None file_.version = version2 file_.save() # disable old files like Version.from_upload() would. 
version2.disable_old_files() response = self.client.get(self.url) assert response.status_code == 200 expected = [ ('Nominated One 0.1', reverse('reviewers.review', args=[version1.addon.slug])), ('Nominated Two 0.2', reverse('reviewers.review', args=[version2.addon.slug])), ] doc = pq(response.content) check_links( expected, doc('#addon-queue tr.addon-row td a:not(.app-icon)'), verify=False) def test_queue_layout(self): self._test_queue_layout('New', tab_position=0, total_addons=2, total_queues=2) def test_get_queue(self): self._test_get_queue() def test_static_theme_filtered_out(self): self.addons['Nominated Two'].update(type=amo.ADDON_EXTENSION) # Static Theme shouldn't be shown self.expected_addons = [self.addons['Nominated One']] self._test_results() # Unless you have that permission also self.grant_permission(self.user, 'Addons:Review') self.expected_addons = [ self.addons['Nominated One'], self.addons['Nominated Two']] self._test_results() class TestModeratedQueue(QueueTest): fixtures = ['base/users', 'ratings/dev-reply'] def setUp(self): super(TestModeratedQueue, self).setUp() self.url = reverse('reviewers.queue_moderated') url_flag = reverse('addons.ratings.flag', args=['a1865', 218468]) response = self.client.post(url_flag, {'flag': RatingFlag.SPAM}) assert response.status_code == 200 assert RatingFlag.objects.filter(flag=RatingFlag.SPAM).count() == 1 assert Rating.objects.filter(editorreview=True).count() == 1 self.grant_permission(self.user, 'Ratings:Moderate') def test_results(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#reviews-flagged') rows = doc('.review-flagged:not(.review-saved)') assert rows.length == 1 assert rows.find('h3').text() == '' # Default is "Skip." 
assert doc('#id_form-0-action_1:checked').length == 1 flagged = doc('.reviews-flagged-reasons span.light').text() reviewer = RatingFlag.objects.all()[0].user.name assert flagged.startswith('Flagged by %s' % reviewer), ( 'Unexpected text: %s' % flagged) addon = Addon.objects.get(id=1865) addon.name = u'náme' addon.save() response = self.client.get(self.url) doc = pq(response.content)('#reviews-flagged') rows = doc('.review-flagged:not(.review-saved)') assert rows.length == 1 assert rows.find('h3').text() == u'náme' def setup_actions(self, action): response = self.client.get(self.url) assert response.status_code == 200 form_0_data = initial(response.context['reviews_formset'].forms[0]) assert Rating.objects.filter(addon=1865).count() == 2 formset_data = formset(form_0_data) formset_data['form-0-action'] = action response = self.client.post(self.url, formset_data) self.assert3xx(response, self.url) def test_skip(self): self.setup_actions(ratings.REVIEW_MODERATE_SKIP) # Make sure it's still there. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) rows = doc('#reviews-flagged .review-flagged:not(.review-saved)') assert rows.length == 1 def test_skip_score(self): self.setup_actions(ratings.REVIEW_MODERATE_SKIP) assert ReviewerScore.objects.filter( note_key=amo.REVIEWED_ADDON_REVIEW).count() == 0 def get_logs(self, action): return ActivityLog.objects.filter(action=action.id) def test_remove(self): """Make sure the reviewer tools can delete a review.""" self.setup_actions(ratings.REVIEW_MODERATE_DELETE) logs = self.get_logs(amo.LOG.DELETE_RATING) assert logs.count() == 1 # Make sure it's removed from the queue. 
response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#reviews-flagged .no-results').length == 1 response = self.client.get(reverse('reviewers.ratings_moderation_log')) assert pq(response.content)('table .more-details').attr('href') == ( reverse('reviewers.ratings_moderation_log.detail', args=[logs[0].id])) # Make sure it was actually deleted. assert Rating.objects.filter(addon=1865).count() == 1 # But make sure it wasn't *actually* deleted. assert Rating.unfiltered.filter(addon=1865).count() == 2 def test_remove_fails_for_own_addon(self): """ Make sure the reviewer tools can't delete a review for an add-on owned by the user. """ addon = Addon.objects.get(pk=1865) user = UserProfile.objects.get(email='[email protected]') AddonUser(addon=addon, user=user).save() # Make sure the initial count is as expected assert Rating.objects.filter(addon=1865).count() == 2 self.setup_actions(ratings.REVIEW_MODERATE_DELETE) logs = self.get_logs(amo.LOG.DELETE_RATING) assert logs.count() == 0 # Make sure it's not removed from the queue. response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#reviews-flagged .no-results').length == 0 # Make sure it was not actually deleted. assert Rating.objects.filter(addon=1865).count() == 2 def test_remove_score(self): self.setup_actions(ratings.REVIEW_MODERATE_DELETE) assert ReviewerScore.objects.filter( note_key=amo.REVIEWED_ADDON_REVIEW).count() == 1 def test_keep(self): """Make sure the reviewer tools can remove flags and keep a review.""" self.setup_actions(ratings.REVIEW_MODERATE_KEEP) logs = self.get_logs(amo.LOG.APPROVE_RATING) assert logs.count() == 1 # Make sure it's removed from the queue. response = self.client.get(self.url) assert response.status_code == 200 assert pq(response.content)('#reviews-flagged .no-results').length == 1 rating = Rating.objects.filter(addon=1865) # Make sure it's NOT deleted... 
assert rating.count() == 2 # ...but it's no longer flagged. assert rating.filter(editorreview=1).count() == 0 def test_keep_score(self): self.setup_actions(ratings.REVIEW_MODERATE_KEEP) assert ReviewerScore.objects.filter( note_key=amo.REVIEWED_ADDON_REVIEW).count() == 1 def test_queue_layout(self): # From the fixtures we already have 2 reviews, one is flagged. We add # a bunch of reviews from different scenarios and make sure they don't # count towards the total. # Add a review associated with an normal addon rating = Rating.objects.create( addon=addon_factory(), user=user_factory(), body='show me', editorreview=True) RatingFlag.objects.create(rating=rating) # Add a review associated with an incomplete addon rating = Rating.objects.create( addon=addon_factory(status=amo.STATUS_NULL), user=user_factory(), body='dont show me', editorreview=True) RatingFlag.objects.create(rating=rating) # Add a review associated to an unlisted version addon = addon_factory() version = version_factory( addon=addon, channel=amo.RELEASE_CHANNEL_UNLISTED) rating = Rating.objects.create( addon=addon_factory(), version=version, user=user_factory(), body='dont show me either', editorreview=True) RatingFlag.objects.create(rating=rating) self._test_queue_layout('Rating Reviews', tab_position=2, total_addons=2, total_queues=3) def test_no_reviews(self): Rating.objects.all().delete() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#reviews-flagged') assert doc('.no-results').length == 1 assert doc('.review-saved button').length == 1 # Show only one button. def test_do_not_show_reviews_for_non_public_addons(self): Addon.objects.all().update(status=amo.STATUS_NULL) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#reviews-flagged') # There should be no results since all add-ons are not public. 
assert doc('.no-results').length == 1 def test_do_not_show_reviews_for_unlisted_addons(self): for addon in Addon.objects.all(): self.make_addon_unlisted(addon) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#reviews-flagged') # There should be no results since all add-ons are unlisted. assert doc('.no-results').length == 1 class TestUnlistedAllList(QueueTest): listed = False def setUp(self): super(TestUnlistedAllList, self).setUp() self.url = reverse('reviewers.unlisted_queue_all') # We should have all add-ons. self.expected_addons = self.get_expected_addons_by_names( ['Pending One', 'Pending Two', 'Nominated One', 'Nominated Two', 'Public']) # Need to set unique nomination times or we get a psuedo-random order. for idx, addon in enumerate(self.expected_addons): latest_version = addon.find_latest_version( channel=amo.RELEASE_CHANNEL_UNLISTED) latest_version.update( nomination=(datetime.now() - timedelta(minutes=idx))) def test_results(self): self._test_results() def test_review_notes_json(self): latest_version = self.expected_addons[0].find_latest_version( channel=amo.RELEASE_CHANNEL_UNLISTED) log = ActivityLog.create(amo.LOG.APPROVE_VERSION, latest_version, self.expected_addons[0], user=UserProfile.objects.get(pk=999), details={'comments': 'stish goin` down son'}) url = reverse('reviewers.queue_review_text') + str(log.id) response = self.client.get(url) assert response.status_code == 200 assert (json.loads(response.content) == {'reviewtext': 'stish goin` down son'}) class TestAutoApprovedQueue(QueueTest): def setUp(self): super(TestAutoApprovedQueue, self).setUp() self.url = reverse('reviewers.queue_auto_approved') def login_with_permission(self): user = UserProfile.objects.get(email='[email protected]') self.grant_permission(user, 'Addons:PostReview') self.client.login(email=user.email) def get_addon_latest_version(self, addon): """Method used by _test_results() to fetch the version that the queue is supposed 
to display. Overridden here because in our case, it's not necessarily the latest available version - we display the current public version instead (which is not guaranteed to be the latest auto-approved one, but good enough) for this page.""" return addon.current_version def generate_files(self): """Generate add-ons needed for these tests.""" # Has not been auto-approved. extra_addon = addon_factory(name=u'Extra Addôn 1') AutoApprovalSummary.objects.create( version=extra_addon.current_version, verdict=amo.NOT_AUTO_APPROVED) # Has not been auto-approved either, only dry run. extra_addon2 = addon_factory(name=u'Extra Addôn 2') AutoApprovalSummary.objects.create( version=extra_addon2.current_version, verdict=amo.WOULD_HAVE_BEEN_AUTO_APPROVED) # Has been auto-approved, but that auto-approval has been confirmed by # a human already. extra_addon3 = addon_factory(name=u'Extra Addôn 3') extra_summary3 = AutoApprovalSummary.objects.create( version=extra_addon3.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=extra_addon3, counter=1, last_human_review=extra_summary3.created) # Has been auto-approved and reviewed by a human before. addon1 = addon_factory(name=u'Addôn 1') AutoApprovalSummary.objects.create( version=addon1.current_version, verdict=amo.AUTO_APPROVED) AddonApprovalsCounter.objects.create( addon=addon1, counter=1, last_human_review=self.days_ago(42)) # Has been auto-approved twice, last_human_review is somehow None, # the 'created' date will be used to order it (older is higher). 
addon2 = addon_factory(name=u'Addôn 2') addon2.update(created=self.days_ago(10)) AutoApprovalSummary.objects.create( version=addon2.current_version, verdict=amo.AUTO_APPROVED) AddonApprovalsCounter.objects.create( addon=addon2, counter=1, last_human_review=None) addon2_version2 = version_factory(addon=addon2) AutoApprovalSummary.objects.create( version=addon2_version2, verdict=amo.AUTO_APPROVED) # Has been auto-approved and never been seen by a human, # the 'created' date will be used to order it (newer is lower). addon3 = addon_factory(name=u'Addôn 3') addon3.update(created=self.days_ago(2)) AutoApprovalSummary.objects.create( version=addon3.current_version, verdict=amo.AUTO_APPROVED) AddonApprovalsCounter.objects.create( addon=addon3, counter=1, last_human_review=None) # Has been auto-approved, should be first because of its weight. addon4 = addon_factory(name=u'Addôn 4') addon4.update(created=self.days_ago(14)) AutoApprovalSummary.objects.create( version=addon4.current_version, verdict=amo.AUTO_APPROVED, weight=500) AddonApprovalsCounter.objects.create( addon=addon4, counter=0, last_human_review=self.days_ago(1)) self.expected_addons = [addon4, addon2, addon3, addon1] def test_only_viewable_with_specific_permission(self): # Regular addon reviewer does not have access. response = self.client.get(self.url) assert response.status_code == 403 # Regular user doesn't have access. 
self.client.logout() assert self.client.login(email='[email protected]') response = self.client.get(self.url) assert response.status_code == 403 def test_results(self): self.login_with_permission() self.generate_files() self._test_results() def test_results_weights(self): addon1 = addon_factory(name=u'Addôn 1') AutoApprovalSummary.objects.create( version=addon1.current_version, verdict=amo.AUTO_APPROVED, weight=amo.POST_REVIEW_WEIGHT_HIGHEST_RISK + 1) AddonApprovalsCounter.reset_for_addon(addon1) addon2 = addon_factory(name=u'Addôn 2') AutoApprovalSummary.objects.create( version=addon2.current_version, verdict=amo.AUTO_APPROVED, weight=amo.POST_REVIEW_WEIGHT_HIGH_RISK + 1) AddonApprovalsCounter.reset_for_addon(addon2) addon3 = addon_factory(name=u'Addôn 3') AutoApprovalSummary.objects.create( version=addon3.current_version, verdict=amo.AUTO_APPROVED, weight=amo.POST_REVIEW_WEIGHT_MEDIUM_RISK + 1) AddonApprovalsCounter.reset_for_addon(addon3) addon4 = addon_factory(name=u'Addôn 4') AutoApprovalSummary.objects.create( version=addon4.current_version, verdict=amo.AUTO_APPROVED, weight=1) AddonApprovalsCounter.reset_for_addon(addon4) self.expected_addons = [addon1, addon2, addon3, addon4] self.login_with_permission() doc = self._test_results() expected = ['risk-highest', 'risk-high', 'risk-medium', 'risk-low'] classnames = [ item.attrib['class'] for item in doc('.addon-row td:eq(4) span')] assert expected == classnames def test_queue_layout(self): self.login_with_permission() self.generate_files() self._test_queue_layout("Auto Approved", tab_position=2, total_addons=4, total_queues=3, per_page=1) class TestExpiredInfoRequestsQueue(QueueTest): def setUp(self): super(TestExpiredInfoRequestsQueue, self).setUp() self.url = reverse('reviewers.queue_expired_info_requests') def generate_files(self): # Extra add-on with no pending info request. addon_factory(name=u'Extra Addôn 1') # Extra add-on with a non-expired pending info request. 
extra_addon = addon_factory(name=u'Extra Addôn 2') AddonReviewerFlags.objects.create( addon=extra_addon, pending_info_request=datetime.now() + timedelta(days=1)) # Pending addon with expired info request. addon1 = addon_factory(name=u'Pending Addön 1', status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=addon1, pending_info_request=self.days_ago(2)) # Public addon with expired info request. addon2 = addon_factory(name=u'Public Addön 2', status=amo.STATUS_PUBLIC) AddonReviewerFlags.objects.create( addon=addon2, pending_info_request=self.days_ago(42)) # Deleted addon with expired info request. addon3 = addon_factory(name=u'Deleted Addön 3', status=amo.STATUS_DELETED) AddonReviewerFlags.objects.create( addon=addon3, pending_info_request=self.days_ago(42)) # Mozilla-disabled addon with expired info request. addon4 = addon_factory(name=u'Disabled Addön 4', status=amo.STATUS_DISABLED) AddonReviewerFlags.objects.create( addon=addon4, pending_info_request=self.days_ago(42)) # Incomplete addon with expired info request. addon5 = addon_factory(name=u'Incomplete Addön 5', status=amo.STATUS_NULL) AddonReviewerFlags.objects.create( addon=addon5, pending_info_request=self.days_ago(42)) # Invisible (user-disabled) addon with expired info request. addon6 = addon_factory(name=u'Incomplete Addön 5', status=amo.STATUS_PUBLIC, disabled_by_user=True) AddonReviewerFlags.objects.create( addon=addon6, pending_info_request=self.days_ago(42)) self.expected_addons = [addon2, addon1] def test_results_no_permission(self): # Addon reviewer doesn't have access. response = self.client.get(self.url) assert response.status_code == 403 # Regular user doesn't have access. 
self.client.logout() assert self.client.login(email='[email protected]') response = self.client.get(self.url) assert response.status_code == 403 def test_results(self): self.grant_permission(self.user, 'Reviews:Admin') self.generate_files() self._test_results() class TestContentReviewQueue(QueueTest): def setUp(self): super(TestContentReviewQueue, self).setUp() self.url = reverse('reviewers.queue_content_review') self.channel_name = 'content' def login_with_permission(self): user = UserProfile.objects.get(email='[email protected]') self.grant_permission(user, 'Addons:ContentReview') self.client.login(email=user.email) return user def get_addon_latest_version(self, addon): """Method used by _test_results() to fetch the version that the queue is supposed to display. Overridden here because in our case, it's not necessarily the latest available version - we display the current public version instead (which is not guaranteed to be the latest auto-approved one, but good enough) for this page.""" return addon.current_version def generate_files(self): """Generate add-ons needed for these tests.""" # Has not been auto-approved. extra_addon = addon_factory(name=u'Extra Addôn 1') AutoApprovalSummary.objects.create( version=extra_addon.current_version, verdict=amo.NOT_AUTO_APPROVED, ) # Has not been auto-approved either, only dry run. extra_addon2 = addon_factory(name=u'Extra Addôn 2') AutoApprovalSummary.objects.create( version=extra_addon2.current_version, verdict=amo.WOULD_HAVE_BEEN_AUTO_APPROVED, ) # Has been auto-approved, but that content has been approved by # a human already. extra_addon3 = addon_factory(name=u'Extra Addôn 3') AutoApprovalSummary.objects.create( version=extra_addon3.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=extra_addon3, last_content_review=self.days_ago(1)) # This one has never been content-reviewed, but it has the # needs_admin_content_review flag, and we're not an admin. 
extra_addon4 = addon_factory(name=u'Extra Addön 4') extra_addon4.update(created=self.days_ago(2)) AutoApprovalSummary.objects.create( version=extra_addon4.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=extra_addon4, last_content_review=None) AddonReviewerFlags.objects.create( addon=extra_addon4, needs_admin_content_review=True) # This first add-on has been content reviewed so long ago that we # should do it again. addon1 = addon_factory(name=u'Addön 1') AutoApprovalSummary.objects.create( version=addon1.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=addon1, last_content_review=self.days_ago(370)) # This one is quite similar, except its last content review is even # older.. addon2 = addon_factory(name=u'Addön 1') AutoApprovalSummary.objects.create( version=addon2.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=addon2, last_content_review=self.days_ago(842)) # This one has never been content-reviewed. It has an # needs_admin_code_review flag, but that should not have any impact. addon3 = addon_factory(name=u'Addön 2') addon3.update(created=self.days_ago(2)) AutoApprovalSummary.objects.create( version=addon3.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) AddonApprovalsCounter.objects.create( addon=addon3, last_content_review=None) AddonReviewerFlags.objects.create( addon=addon3, needs_admin_code_review=True) # This one has never been content reviewed either, and it does not even # have an AddonApprovalsCounter. addon4 = addon_factory(name=u'Addön 3') addon4.update(created=self.days_ago(1)) AutoApprovalSummary.objects.create( version=addon4.current_version, verdict=amo.AUTO_APPROVED, confirmed=True) assert not AddonApprovalsCounter.objects.filter(addon=addon4).exists() # Addons with no last_content_review date should be first, ordered by # their creation date, older first. 
self.expected_addons = [addon3, addon4, addon2, addon1] def test_only_viewable_with_specific_permission(self): # Regular addon reviewer does not have access. response = self.client.get(self.url) assert response.status_code == 403 # Regular user doesn't have access. self.client.logout() assert self.client.login(email='[email protected]') response = self.client.get(self.url) assert response.status_code == 403 def test_results(self): self.login_with_permission() self.generate_files() self._test_results() def test_queue_layout(self): self.login_with_permission() self.generate_files() self._test_queue_layout('Content Review', tab_position=2, total_addons=4, total_queues=3, per_page=1) def test_queue_layout_admin(self): # Admins should see the extra add-on that needs admin content review. user = self.login_with_permission() self.grant_permission(user, 'Reviews:Admin') self.generate_files() self._test_queue_layout('Content Review', tab_position=2, total_addons=5, total_queues=4) class TestPerformance(QueueTest): fixtures = ['base/users', 'base/addon_3615'] """Test the page at /reviewers/performance.""" def setUpReviewer(self): self.login_as_reviewer() core.set_user(UserProfile.objects.get(username='reviewer')) self.create_logs() def setUpAdmin(self): self.login_as_admin() core.set_user(UserProfile.objects.get(username='admin')) self.create_logs() def get_url(self, args=None): if args is None: args = [] return reverse('reviewers.performance', args=args) def create_logs(self): addon = Addon.objects.all()[0] version = addon.versions.all()[0] for i in amo.LOG_REVIEWER_REVIEW_ACTION: ActivityLog.create(amo.LOG_BY_ID[i], addon, version) # Throw in an automatic approval - should be ignored. 
ActivityLog.create( amo.LOG.APPROVE_VERSION, addon, version, user=UserProfile.objects.get(id=settings.TASK_USER_ID)) def _test_chart(self): r = self.client.get(self.get_url()) assert r.status_code == 200 doc = pq(r.content) num = len(amo.LOG_REVIEWER_REVIEW_ACTION) label = datetime.now().strftime('%Y-%m') data = {label: {u'teamcount': num, u'teamavg': u'%s.0' % num, u'usercount': num, u'teamamt': 1, u'label': datetime.now().strftime('%b %Y')}} assert json.loads(doc('#monthly').attr('data-chart')) == data def test_performance_chart_reviewer(self): self.setUpReviewer() self._test_chart() def test_performance_chart_as_admin(self): self.setUpAdmin() self._test_chart() def test_usercount_with_more_than_one_reviewer(self): self.client.login(email='[email protected]') core.set_user(UserProfile.objects.get(username='clouserw')) self.create_logs() self.setUpReviewer() r = self.client.get(self.get_url()) assert r.status_code == 200 doc = pq(r.content) data = json.loads(doc('#monthly').attr('data-chart')) label = datetime.now().strftime('%Y-%m') assert data[label]['usercount'] == len(amo.LOG_REVIEWER_REVIEW_ACTION) def _test_performance_other_user_as_admin(self): userid = core.get_user().pk r = self.client.get(self.get_url([10482])) doc = pq(r.content) assert doc('#select_user').length == 1 # Let them choose reviewers. options = doc('#select_user option') assert options.length == 3 assert options.eq(2).val() == str(userid) assert 'clouserw' in doc('#reviews_user').text() def test_performance_other_user_as_admin(self): self.setUpAdmin() self._test_performance_other_user_as_admin() def test_performance_other_user_not_admin(self): self.setUpReviewer() r = self.client.get(self.get_url([10482])) doc = pq(r.content) assert doc('#select_user').length == 0 # Don't let them choose. 
assert doc('#reviews_user').text() == 'Your Reviews' class SearchTest(ReviewerTest): listed = True def setUp(self): super(SearchTest, self).setUp() self.user = UserProfile.objects.get(email='[email protected]') self.login_as_reviewer() if self.listed is False: # Testing unlisted views: needs Addons:ReviewUnlisted perm. self.grant_permission(self.user, 'Addons:ReviewUnlisted') def named_addons(self, request): return [ r.record.addon_name for r in request.context['page'].object_list] def search(self, *args, **kw): response = self.client.get(self.url, kw) assert response.status_code == 200 assert response.context['search_form'].errors.as_text() == '' return response class BaseTestQueueSearch(SearchTest): fixtures = ['base/users', 'base/appversion'] __test__ = False # this is an abstract test case def generate_files(self, subset=None): if subset is None: subset = [] files = OrderedDict([ ('Not Needing Admin Review', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, }), ('Another Not Needing Admin Review', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, }), ('Needs Admin Review', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, 'needs_admin_code_review': True, }), ('Justin Bieber Theme', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, 'type': amo.ADDON_THEME, }), ('Justin Bieber Search Bar', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, 'type': amo.ADDON_SEARCH, }), ('Bieber For Mobile', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, 'version_kw': {'application': amo.ANDROID.id}, }), ('Linux Widget', { 'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, }), ('Mac Widget', { 
'version_str': '0.1', 'addon_status': amo.STATUS_NOMINATED, 'file_status': amo.STATUS_AWAITING_REVIEW, }), ('Deleted', { 'version_str': '0.1', 'addon_status': amo.STATUS_DELETED, 'file_status': amo.STATUS_AWAITING_REVIEW, }), ]) results = {} channel = (amo.RELEASE_CHANNEL_LISTED if self.listed else amo.RELEASE_CHANNEL_UNLISTED) for name, attrs in files.iteritems(): if not subset or name in subset: version_kw = attrs.get('version_kw', {}) version_kw.update( {'channel': channel, 'version': attrs.pop('version_str')}) attrs['version_kw'] = version_kw file_kw = attrs.get('file_kw', {}) file_kw.update({'status': attrs.pop('file_status')}) attrs['file_kw'] = file_kw attrs.update({'version_kw': version_kw, 'file_kw': file_kw}) needs_admin_code_review = attrs.pop( 'needs_admin_code_review', None) results[name] = addon_factory( status=attrs.pop('addon_status'), name=name, **attrs) if needs_admin_code_review: AddonReviewerFlags.objects.create( addon=results[name], needs_admin_code_review=True) return results def generate_file(self, name): return self.generate_files([name])[name] def test_search_by_needs_admin_code_review_admin(self): self.login_as_admin() self.generate_files(['Not Needing Admin Review', 'Needs Admin Review']) response = self.search(needs_admin_code_review=1) assert response.status_code == 200 assert self.named_addons(response) == ['Needs Admin Review'] def test_queue_counts_admin(self): self.login_as_admin() self.generate_files(['Not Needing Admin Review', 'Needs Admin Review']) response = self.search(text_query='admin', per_page=1) assert response.status_code == 200 doc = pq(response.content) assert doc('.data-grid-top .num-results').text() == ( u'Results 1\u20131 of 2') def test_search_by_addon_name_admin(self): self.login_as_admin() self.generate_files(['Not Needing Admin Review', 'Needs Admin Review', 'Justin Bieber Theme']) response = self.search(text_query='admin') assert response.status_code == 200 assert sorted(self.named_addons(response)) == [ 'Needs 
Admin Review', 'Not Needing Admin Review'] def test_not_searching(self, **kwargs): self.generate_files(['Not Needing Admin Review', 'Needs Admin Review']) response = self.search(**kwargs) assert response.status_code == 200 assert sorted(self.named_addons(response)) == [ 'Not Needing Admin Review'] # We were just displaying the queue, not searching, but the searching # hidden input in the form should always be set to True regardless, it # will be used once the user submits the form. doc = pq(response.content) assert doc('#id_searching').attr('value') == 'True' def test_not_searching_with_param(self): self.test_not_searching(some_param=1) def test_search_by_nothing(self): self.generate_files(['Not Needing Admin Review', 'Needs Admin Review']) response = self.search(searching='True') assert response.status_code == 200 assert sorted(self.named_addons(response)) == ( ['Needs Admin Review', 'Not Needing Admin Review']) def test_search_by_needs_admin_code_review(self): self.generate_files(['Not Needing Admin Review', 'Needs Admin Review']) response = self.search(needs_admin_code_review=1, searching='True') assert response.status_code == 200 assert self.named_addons(response) == ['Needs Admin Review'] def test_queue_counts(self): self.generate_files(['Not Needing Admin Review', 'Another Not Needing Admin Review', 'Needs Admin Review']) response = self.search( text_query='admin', per_page=1, searching='True') assert response.status_code == 200 doc = pq(response.content) assert doc('.data-grid-top .num-results').text() == ( u'Results 1\u20131 of 3') def test_search_by_addon_name(self): self.generate_files(['Not Needing Admin Review', 'Needs Admin Review', 'Justin Bieber Theme']) response = self.search(text_query='admin', searching='True') assert response.status_code == 200 assert sorted(self.named_addons(response)) == ( ['Needs Admin Review', 'Not Needing Admin Review']) def test_search_by_addon_in_locale(self): name = 'Not Needing Admin Review' generated = 
self.generate_file(name) uni = 'フォクすけといっしょ'.decode('utf8') addon = Addon.objects.get(pk=generated.id) addon.name = {'ja': uni} addon.save() self.url = self.url.replace('/en-US/', '/ja/') response = self.client.get(self.url, {'text_query': uni}, follow=True) assert response.status_code == 200 assert self.named_addons(response) == [name] def test_search_by_addon_author(self): name = 'Not Needing Admin Review' generated = self.generate_file(name) user = UserProfile.objects.all()[0] email = user.email.swapcase() author = AddonUser.objects.create(user=user, addon=generated) for role in [amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV]: author.role = role author.save() response = self.search(text_query=email) assert response.status_code == 200 assert self.named_addons(response) == [name] def test_search_by_supported_email_in_locale(self): name = 'Not Needing Admin Review' generated = self.generate_file(name) uni = 'フォクすけといっしょ@site.co.jp'.decode('utf8') addon = Addon.objects.get(pk=generated.id) addon.support_email = {'ja': uni} addon.save() self.url = self.url.replace('/en-US/', '/ja/') response = self.client.get(self.url, {'text_query': uni}, follow=True) assert response.status_code == 200 assert self.named_addons(response) == [name] def test_clear_search_visible(self): response = self.search(text_query='admin', searching=True) assert response.status_code == 200 assert pq(response.content)( '.clear-queue-search').text() == 'clear search' def test_clear_search_hidden(self): response = self.search(text_query='admin') assert response.status_code == 200 assert not pq(response.content)('.clear-queue-search').text() class TestQueueSearch(BaseTestQueueSearch): __test__ = True def setUp(self): super(TestQueueSearch, self).setUp() self.url = reverse('reviewers.queue_nominated') def test_search_by_addon_type(self): self.generate_files(['Not Needing Admin Review', 'Justin Bieber Theme', 'Justin Bieber Search Bar']) response = self.search(addon_type_ids=[amo.ADDON_THEME]) assert 
response.status_code == 200 assert self.named_addons(response) == ['Justin Bieber Theme'] def test_search_by_addon_type_any(self): self.generate_file('Not Needing Admin Review') response = self.search(addon_type_ids=[amo.ADDON_ANY]) assert response.status_code == 200 assert self.named_addons(response), 'Expected some add-ons' def test_search_by_many_addon_types(self): self.generate_files(['Not Needing Admin Review', 'Justin Bieber Theme', 'Justin Bieber Search Bar']) response = self.search(addon_type_ids=[amo.ADDON_THEME, amo.ADDON_SEARCH]) assert response.status_code == 200 assert sorted(self.named_addons(response)) == ( ['Justin Bieber Search Bar', 'Justin Bieber Theme']) def test_search_by_app(self): self.generate_files(['Bieber For Mobile', 'Linux Widget']) response = self.search(application_id=[amo.ANDROID.id]) assert response.status_code == 200 assert self.named_addons(response) == ['Bieber For Mobile'] def test_preserve_multi_apps(self): self.generate_files(['Bieber For Mobile', 'Linux Widget']) channel = (amo.RELEASE_CHANNEL_LISTED if self.listed else amo.RELEASE_CHANNEL_UNLISTED) multi = addon_factory( status=amo.STATUS_NOMINATED, name='Multi Application', version_kw={'channel': channel, 'application': amo.FIREFOX.id}, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) av_min, _ = AppVersion.objects.get_or_create( application=amo.ANDROID.id, version='4.0.99') av_max, _ = AppVersion.objects.get_or_create( application=amo.ANDROID.id, version='5.0.0') ApplicationsVersions.objects.get_or_create( application=amo.ANDROID.id, version=multi.versions.latest(), min=av_min, max=av_max) response = self.search(application_id=[amo.ANDROID.id]) assert response.status_code == 200 assert self.named_addons(response) == [ 'Bieber For Mobile', 'Multi Application'] def test_clear_search_uses_correct_queue(self): # The "clear search" link points to the right listed or unlisted queue. # Listed queue. 
url = reverse('reviewers.queue_nominated') response = self.client.get( url, {'text_query': 'admin', 'searching': True}) assert response.status_code == 200 doc = pq(response.content) assert doc('.clear-queue-search').attr('href') == url class TestQueueSearchUnlistedAllList(BaseTestQueueSearch): listed = False __test__ = True def setUp(self): super(TestQueueSearchUnlistedAllList, self).setUp() self.url = reverse('reviewers.unlisted_queue_all') def test_search_deleted(self): self.generate_files(['Not Needing Admin Review', 'Deleted']) r = self.search(deleted=1) assert self.named_addons(r) == ['Deleted'] def test_search_not_deleted(self): self.generate_files(['Not Needing Admin Review', 'Deleted']) response = self.search(deleted=0) assert response.status_code == 200 assert self.named_addons(response) == ['Not Needing Admin Review'] def test_search_by_guid(self): name = 'Not Needing Admin Review' addon = self.generate_file(name) addon.update(guid='@guidymcguid') response = self.search(text_query='mcguid') assert response.status_code == 200 assert self.named_addons(response) == ['Not Needing Admin Review'] class ReviewBase(QueueTest): def setUp(self): super(QueueTest, self).setUp() self.login_as_reviewer() self.addons = {} self.addon = self.generate_file('Public') self.version = self.addon.current_version self.file = self.version.files.get() self.reviewer = UserProfile.objects.get(username='reviewer') self.reviewer.update(display_name=u'A Reviêwer') self.url = reverse('reviewers.review', args=[self.addon.slug]) AddonUser.objects.create(addon=self.addon, user_id=999) def get_addon(self): return Addon.objects.get(pk=self.addon.pk) def get_dict(self, **kw): data = {'operating_systems': 'win', 'applications': 'something', 'comments': 'something'} data.update(kw) return data class TestReview(ReviewBase): def test_reviewer_required(self): assert self.client.head(self.url).status_code == 200 def test_not_anonymous(self): self.client.logout() 
self.assertLoginRedirects(self.client.head(self.url), to=self.url) @patch.object(settings, 'ALLOW_SELF_REVIEWS', False) def test_not_author(self): AddonUser.objects.create(addon=self.addon, user=self.reviewer) assert self.client.head(self.url).status_code == 302 def test_review_unlisted_while_a_listed_version_is_awaiting_review(self): self.make_addon_unlisted(self.addon) version_factory( addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) self.addon.update(status=amo.STATUS_NOMINATED, slug='awaiting') self.url = reverse( 'reviewers.review', args=('unlisted', self.addon.slug)) self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') assert self.client.get(self.url).status_code == 200 def test_needs_unlisted_reviewer_for_only_unlisted(self): self.addon.versions.update(channel=amo.RELEASE_CHANNEL_UNLISTED) assert self.client.head(self.url).status_code == 404 self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') assert self.client.head(self.url).status_code == 200 def test_dont_need_unlisted_reviewer_for_mixed_channels(self): version_factory( addon=self.addon, channel=amo.RELEASE_CHANNEL_UNLISTED, version='9.9') assert self.addon.find_latest_version( channel=amo.RELEASE_CHANNEL_UNLISTED) assert self.addon.current_version.channel == amo.RELEASE_CHANNEL_LISTED assert self.client.head(self.url).status_code == 200 self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') assert self.client.head(self.url).status_code == 200 def test_not_flags(self): self.addon.current_version.files.update(is_restart_required=False) response = self.client.get(self.url) assert response.status_code == 200 assert len(response.context['flags']) == 0 def test_flag_needs_admin_code_review(self): self.addon.current_version.files.update(is_restart_required=False) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) response = self.client.get(self.url) assert response.status_code == 200 assert 
len(response.context['flags']) == 1 def test_info_comments_requested(self): response = self.client.post(self.url, {'action': 'reply'}) assert response.context['form'].errors['comments'][0] == ( 'This field is required.') def test_whiteboard_url(self): # Listed review. response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert ( doc('#whiteboard_form').attr('action') == '/en-US/reviewers/whiteboard/listed/public') # Content review. self.grant_permission(self.reviewer, 'Addons:ContentReview') AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert ( doc('#whiteboard_form').attr('action') == '/en-US/reviewers/whiteboard/content/public') # Unlisted review. self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') version_factory(addon=self.addon, channel=amo.RELEASE_CHANNEL_UNLISTED) self.url = reverse( 'reviewers.review', args=['unlisted', self.addon.slug]) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert ( doc('#whiteboard_form').attr('action') == '/en-US/reviewers/whiteboard/unlisted/public') # Listed review, but deleted. 
self.addon.delete() self.url = reverse( 'reviewers.review', args=['listed', self.addon.pk]) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert ( doc('#whiteboard_form').attr('action') == '/en-US/reviewers/whiteboard/listed/%d' % self.addon.pk) def test_no_whiteboards_for_static_themes(self): self.grant_permission(self.reviewer, 'Addons:ThemeReview') self.addon.update(type=amo.ADDON_STATICTHEME) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('#whiteboard_form') def test_comment(self): response = self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) assert response.status_code == 302 assert len(mail.outbox) == 0 comment_version = amo.LOG.COMMENT_VERSION assert ActivityLog.objects.filter( action=comment_version.id).count() == 1 def test_info_requested(self): response = self.client.post(self.url, {'action': 'reply', 'comments': 'hello sailor'}) assert response.status_code == 302 assert len(mail.outbox) == 1 self.assertTemplateUsed(response, 'activity/emails/from_reviewer.txt') def test_super_review_requested(self): response = self.client.post(self.url, {'action': 'super', 'comments': 'hello sailor'}) assert response.status_code == 302 def test_info_requested_canned_response(self): response = self.client.post(self.url, {'action': 'reply', 'comments': 'hello sailor', 'canned_response': 'foo'}) assert response.status_code == 302 assert len(mail.outbox) == 1 self.assertTemplateUsed(response, 'activity/emails/from_reviewer.txt') def test_page_title(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('title').text() == ( '%s :: Reviewer Tools :: Add-ons for Firefox' % self.addon.name) def test_files_shown(self): response = self.client.get(self.url) assert response.status_code == 200 items = pq(response.content)('#review-files .files .file-info') assert items.length == 
1 f = self.version.all_files[0] expected = [ ('All Platforms', f.get_url_path('reviewer')), ('Validation', reverse('devhub.file_validation', args=[self.addon.slug, f.id])), ('Contents', None), ] check_links(expected, items.find('a'), verify=False) def test_item_history(self, channel=amo.RELEASE_CHANNEL_LISTED): self.addons['something'] = addon_factory( status=amo.STATUS_PUBLIC, name=u'something', version_kw={'version': u'0.2', 'channel': channel}, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) assert self.addon.versions.filter(channel=channel).count() == 1 self.review_version(self.version, self.url) v2 = self.addons['something'].versions.all()[0] v2.addon = self.addon v2.created = v2.created + timedelta(days=1) v2.save() assert self.addon.versions.filter(channel=channel).count() == 2 action = self.review_version(v2, self.url) response = self.client.get(self.url) assert response.status_code == 200 # The 2 following lines replace pq(res.content), it's a workaround for # https://github.com/gawel/pyquery/issues/31 UTF8_PARSER = HTMLParser(encoding='utf-8') doc = pq(fromstring(response.content, parser=UTF8_PARSER)) table = doc('#review-files') # Check the history for both versions. ths = table.children('tr > th') assert ths.length == 2 assert '0.1' in ths.eq(0).text() assert '0.2' in ths.eq(1).text() rows = table('td.files') assert rows.length == 2 comments = rows.siblings('td') assert comments.length == 2 for idx in xrange(comments.length): td = comments.eq(idx) assert td.find('.history-comment').text() == 'something' assert td.find('th').text() == { 'public': 'Approved', 'reply': 'Reviewer Reply'}[action] reviewer_name = td.find('td a').text() assert ((reviewer_name == self.reviewer.display_name) or (reviewer_name == self.other_reviewer.display_name)) def test_item_history_with_unlisted_versions_too(self): # Throw in an unlisted version to be ignored. 
version_factory( version=u'0.2', addon=self.addon, channel=amo.RELEASE_CHANNEL_UNLISTED, file_kw={'status': amo.STATUS_PUBLIC}) self.test_item_history() def test_item_history_with_unlisted_review_page(self): self.addon.versions.update(channel=amo.RELEASE_CHANNEL_UNLISTED) self.version.reload() # Throw in an listed version to be ignored. version_factory( version=u'0.2', addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED, file_kw={'status': amo.STATUS_PUBLIC}) self.url = reverse('reviewers.review', args=[ 'unlisted', self.addon.slug]) self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') self.test_item_history(channel=amo.RELEASE_CHANNEL_UNLISTED) def generate_deleted_versions(self): self.addon = addon_factory(version_kw={ 'version': '1.0', 'created': self.days_ago(1)}) self.url = reverse('reviewers.review', args=[self.addon.slug]) versions = ({'version': '0.1', 'action': 'comment', 'comments': 'millenium hand and shrimp'}, {'version': '0.1', 'action': 'public', 'comments': 'buggrit'}, {'version': '0.2', 'action': 'comment', 'comments': 'I told em'}, {'version': '0.3'}) for i, version_data in enumerate(versions): version = version_factory( addon=self.addon, version=version_data['version'], created=self.days_ago(-i), file_kw={'status': amo.STATUS_AWAITING_REVIEW}) if 'action' in version_data: data = {'action': version_data['action'], 'operating_systems': 'win', 'applications': 'something', 'comments': version_data['comments']} self.client.post(self.url, data) version.delete(hard=True) self.addon.current_version.delete(hard=True) @patch('olympia.reviewers.utils.sign_file') def test_item_history_deleted(self, mock_sign): self.generate_deleted_versions() response = self.client.get(self.url) assert response.status_code == 200 table = pq(response.content)('#review-files') # Check the history for all versions. ths = table.children('tr > th') assert ths.length == 3 # The 2 with the same number will be coalesced. 
assert '0.1' in ths.eq(0).text() assert '0.2' in ths.eq(1).text() assert '0.3' in ths.eq(2).text() for idx in xrange(2): assert 'Deleted' in ths.eq(idx).text() bodies = table.children('.listing-body') assert 'millenium hand and shrimp' in bodies.eq(0).text() assert 'buggrit' in bodies.eq(0).text() assert 'I told em' in bodies.eq(1).text() assert mock_sign.called def test_item_history_compat_ordered(self): """ Make sure that apps in compatibility are ordered. """ av = AppVersion.objects.all()[0] v = self.addon.versions.all()[0] ApplicationsVersions.objects.create( version=v, application=amo.THUNDERBIRD.id, min=av, max=av) ApplicationsVersions.objects.create( version=v, application=amo.SEAMONKEY.id, min=av, max=av) assert self.addon.versions.count() == 1 url = reverse('reviewers.review', args=[self.addon.slug]) response = self.client.get(url) assert response.status_code == 200 doc = pq(response.content) icons = doc('.listing-body .app-icon') assert icons.eq(0).attr('title') == "Firefox" assert icons.eq(1).attr('title') == "SeaMonkey" assert icons.eq(2).attr('title') == "Thunderbird" def test_item_history_weight(self): """ Make sure the weight is shown on the review page""" AutoApprovalSummary.objects.create( version=self.version, verdict=amo.AUTO_APPROVED, weight=284) self.grant_permission(self.reviewer, 'Addons:PostReview') url = reverse('reviewers.review', args=[self.addon.slug]) response = self.client.get(url) assert response.status_code == 200 doc = pq(response.content) risk = doc('.listing-body .file-weight') assert risk.text() == "Weight: 284" def test_item_history_notes(self): version = self.addon.versions.all()[0] version.releasenotes = 'hi' version.approvalnotes = 'secret hi' version.save() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#review-files') version = doc('.activity_version') assert version.length == 1 assert version.text() == 'hi' approval = doc('.activity_approval') assert approval.length == 1 
assert approval.text() == 'secret hi' def test_item_history_header(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert ('Approved' in doc('#review-files .listing-header .light').text()) def test_item_history_comment(self): # Add Comment. self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content)('#review-files') assert doc('th').eq(1).text() == 'Commented' assert doc('.history-comment').text() == 'hello sailor' def test_files_in_item_history(self): data = {'action': 'public', 'operating_systems': 'win', 'applications': 'something', 'comments': 'something'} self.client.post(self.url, data) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) items = doc('#review-files .files .file-info') assert items.length == 1 assert items.find('a.reviewers-install').text() == 'All Platforms' def test_no_items(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#review-files .no-activity').length == 1 def test_action_links(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('View Listing', self.addon.get_url_path()), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_action_links_as_admin(self): self.login_as_admin() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('View Listing', self.addon.get_url_path()), ('Edit', self.addon.get_dev_url()), ('Admin Page', reverse('zadmin.addon_manage', args=[self.addon.id])), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_unlisted_addon_action_links_as_admin(self): """No "View Listing" link for unlisted addons, "edit"/"manage" links for the admins.""" 
self.make_addon_unlisted(self.addon) self.login_as_admin() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('Unlisted Review Page', reverse( 'reviewers.review', args=('unlisted', self.addon.slug))), ('Edit', self.addon.get_dev_url()), ('Admin Page', reverse( 'zadmin.addon_manage', args=[self.addon.id])), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_mixed_channels_action_links_as_admin(self): self.make_addon_unlisted(self.addon) version_factory( addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) self.addon.update(status=amo.STATUS_NOMINATED) self.login_as_admin() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('View Listing', self.addon.get_url_path()), ('Unlisted Review Page', reverse( 'reviewers.review', args=('unlisted', self.addon.slug))), ('Edit', self.addon.get_dev_url()), ('Admin Page', reverse( 'zadmin.addon_manage', args=[self.addon.id])), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_mixed_channels_action_links_as_admin_on_unlisted_review(self): self.make_addon_unlisted(self.addon) version_factory( addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) self.addon.update(status=amo.STATUS_NOMINATED) self.login_as_admin() self.url = reverse( 'reviewers.review', args=('unlisted', self.addon.slug)) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('View Listing', self.addon.get_url_path()), ('Listed Review Page', reverse('reviewers.review', args=(self.addon.slug,))), ('Edit', self.addon.get_dev_url()), ('Admin Page', reverse('zadmin.addon_manage', args=[self.addon.id])), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_mixed_channels_action_links_as_regular_reviewer(self): 
self.make_addon_unlisted(self.addon) version_factory( addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) self.addon.update(status=amo.STATUS_NOMINATED) self.login_as_reviewer() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected = [ ('View Listing', self.addon.get_url_path()), ] check_links(expected, doc('#actions-addon a'), verify=False) def test_admin_links_as_non_admin(self): self.login_as_reviewer() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) admin = doc('#actions-addon li') assert admin.length == 1 def test_extra_actions_subscribe_checked_state(self): self.login_as_reviewer() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) subscribe_input = doc('#notify_new_listed_versions')[0] assert 'checked' not in subscribe_input.attrib ReviewerSubscription.objects.create( addon=self.addon, user=self.reviewer) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) subscribe_input = doc('#notify_new_listed_versions')[0] assert subscribe_input.attrib['checked'] == 'checked' def test_extra_actions_token(self): self.login_as_reviewer() self.client.cookies[API_TOKEN_COOKIE] = 'youdidntsaythemagicword' response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) token = doc('#extra-review-actions').attr('data-api-token') assert token == 'youdidntsaythemagicword' def test_extra_actions_not_for_reviewers(self): self.login_as_reviewer() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('#force_disable_addon') assert not doc('#force_enable_addon') assert not doc('#clear_admin_code_review') assert not doc('#clear_admin_content_review') assert not doc('#clear_admin_theme_review') assert not doc('#disable_auto_approval') assert not 
doc('#enable_auto_approval') assert not doc('#clear_pending_info_request') def test_extra_actions_admin_disable_enable(self): self.login_as_admin() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#force_disable_addon') elem = doc('#force_disable_addon')[0] assert 'hidden' not in elem.getparent().attrib.get('class', '') assert doc('#force_enable_addon') elem = doc('#force_enable_addon')[0] assert 'hidden' in elem.getparent().attrib.get('class', '') def test_unflag_option_forflagged_as_admin(self): self.login_as_admin() AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#clear_admin_code_review').length == 1 assert doc('#clear_admin_content_review').length == 0 assert doc('#clear_admin_content_review').length == 0 def test_unflag_content_option_forflagged_as_admin(self): self.login_as_admin() AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=False, needs_admin_content_review=True) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#clear_admin_code_review').length == 0 assert doc('#clear_admin_content_review').length == 1 assert doc('#clear_admin_theme_review').length == 0 def test_unflag_theme_option_forflagged_as_admin(self): self.login_as_admin() AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=False, needs_admin_content_review=False, needs_admin_theme_review=True) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#clear_admin_code_review').length == 0 assert doc('#clear_admin_content_review').length == 0 assert doc('#clear_admin_theme_review').length == 1 def test_disable_auto_approvals_as_admin(self): self.login_as_admin() response = self.client.get(self.url) assert response.status_code 
== 200 doc = pq(response.content) assert doc('#disable_auto_approval') elem = doc('#disable_auto_approval')[0] assert 'hidden' not in elem.getparent().attrib.get('class', '') assert doc('#enable_auto_approval') elem = doc('#enable_auto_approval')[0] assert 'hidden' in elem.getparent().attrib.get('class', '') # Both of them should be absent on static themes, which are not # auto-approved. self.addon.update(type=amo.ADDON_STATICTHEME) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('#disable_auto_approval') assert not doc('#enable_auto_approval') def test_enable_auto_approvals_as_admin_auto_approvals_disabled(self): self.login_as_admin() AddonReviewerFlags.objects.create( addon=self.addon, auto_approval_disabled=True) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#disable_auto_approval') elem = doc('#disable_auto_approval')[0] assert 'hidden' in elem.getparent().attrib.get('class', '') assert doc('#enable_auto_approval') elem = doc('#enable_auto_approval')[0] assert 'hidden' not in elem.getparent().attrib.get('class', '') # Both of them should be absent on static themes, which are not # auto-approved. 
self.addon.update(type=amo.ADDON_STATICTHEME) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('#disable_auto_approval') assert not doc('#enable_auto_approval') def test_clear_pending_info_request_as_admin(self): self.login_as_admin() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('#clear_pending_info_request') AddonReviewerFlags.objects.create( addon=self.addon, pending_info_request=self.days_ago(1)) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#clear_pending_info_request') def test_info_request_checkbox(self): self.login_as_reviewer() assert not self.addon.pending_info_request response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert 'checked' not in doc('#id_info_request')[0].attrib elm = doc('#id_info_request_deadline')[0] assert elm.attrib['readonly'] == 'readonly' assert elm.attrib['min'] == '7' assert elm.attrib['max'] == '7' assert elm.attrib['value'] == '7' AddonReviewerFlags.objects.create( addon=self.addon, pending_info_request=datetime.now() + timedelta(days=7)) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#id_info_request')[0].attrib['checked'] == 'checked' def test_info_request_checkbox_admin(self): self.login_as_admin() assert not self.addon.pending_info_request response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert 'checked' not in doc('#id_info_request')[0].attrib elm = doc('#id_info_request_deadline')[0] assert 'readonly' not in elm.attrib assert elm.attrib['min'] == '1' assert elm.attrib['max'] == '99' assert elm.attrib['value'] == '7' def test_no_public(self): has_public = self.version.files.filter( status=amo.STATUS_PUBLIC).exists() assert has_public response = self.client.get(self.url) 
assert response.status_code == 200 doc = pq(response.content) validation = doc.find('.files') assert validation.find('a').eq(1).text() == "Validation" assert validation.find('a').eq(2).text() == "Contents" assert validation.find('a').length == 3 def test_public_search(self): self.version.files.update(status=amo.STATUS_PUBLIC) self.addon.update(type=amo.ADDON_SEARCH) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#review-files .files ul .file-info').length == 1 def test_version_deletion(self): """ Make sure that we still show review history for deleted versions. """ # Add a new version to the add-on. addon = addon_factory( status=amo.STATUS_NOMINATED, name='something', version_kw={'version': '0.2'}, file_kw={'status': amo.STATUS_AWAITING_REVIEW}) assert self.addon.versions.count() == 1 self.review_version(self.version, self.url) v2 = addon.versions.all()[0] v2.addon = self.addon v2.created = v2.created + timedelta(days=1) v2.save() self.review_version(v2, self.url) assert self.addon.versions.count() == 2 response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) # View the history verify two versions: ths = doc('table#review-files > tr > th:first-child') assert '0.1' in ths.eq(0).text() assert '0.2' in ths.eq(1).text() # Delete a version: v2.delete() # Verify two versions, one deleted: response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) ths = doc('table#review-files > tr > th:first-child') assert ths.length == 2 assert '0.1' in ths.text() def test_no_versions(self): """The review page should still load if there are no versions. 
But not unless you have unlisted permissions.""" assert self.client.get(self.url).status_code == 200 response = self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) assert response.status_code == 302 self.assert3xx(response, reverse('reviewers.queue_pending'), status_code=302) self.version.delete() # Regular reviewer has no permission, gets a 404. assert self.client.get(self.url).status_code == 404 # Reviewer with more powers can look. self.grant_permission(self.reviewer, 'Addons:ReviewUnlisted') assert self.client.get(self.url).status_code == 200 response = self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) assert response.status_code == 302 self.assert3xx(response, reverse('reviewers.queue_pending'), status_code=302) def test_addon_deleted(self): """The review page should still load for deleted addons.""" self.addon.delete() self.url = reverse('reviewers.review', args=[self.addon.pk]) assert self.client.get(self.url).status_code == 200 response = self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) assert response.status_code == 302 self.assert3xx(response, reverse('reviewers.queue_pending'), status_code=302) @patch('olympia.reviewers.utils.sign_file') def review_version(self, version, url, mock_sign): if version.channel == amo.RELEASE_CHANNEL_LISTED: version.files.all()[0].update(status=amo.STATUS_AWAITING_REVIEW) action = 'public' else: action = 'reply' data = { 'action': action, 'operating_systems': 'win', 'applications': 'something', 'comments': 'something', } self.client.post(url, data) if version.channel == amo.RELEASE_CHANNEL_LISTED: assert mock_sign.called return action def test_dependencies_listed(self): AddonDependency.objects.create(addon=self.addon, dependent_addon=self.addon) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) deps = doc('.addon-info .addon-dependencies') assert deps.length == 1 assert deps.find('li').length == 
1 assert deps.find('a').attr('href') == self.addon.get_url_path() def test_eula_displayed(self): assert not bool(self.addon.eula) response = self.client.get(self.url) assert response.status_code == 200 self.assertNotContains(response, 'View End-User License Agreement') self.addon.eula = 'Test!' self.addon.save() assert bool(self.addon.eula) response = self.client.get(self.url) assert response.status_code == 200 self.assertContains(response, 'View End-User License Agreement') def test_privacy_policy_displayed(self): assert self.addon.privacy_policy is None response = self.client.get(self.url) assert response.status_code == 200 self.assertNotContains(response, 'View Privacy Policy') self.addon.privacy_policy = 'Test!' self.addon.save() response = self.client.get(self.url) assert response.status_code == 200 self.assertContains(response, 'View Privacy Policy') def test_requires_payment_indicator(self): assert not self.addon.requires_payment response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert 'No' in doc('tr.requires-payment td').text() self.addon.update(requires_payment=True) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert 'Yes' in doc('tr.requires-payment td').text() def test_viewing(self): url = reverse('reviewers.review_viewing') response = self.client.post(url, {'addon_id': self.addon.id}) data = json.loads(response.content) assert data['current'] == self.reviewer.id assert data['current_name'] == self.reviewer.name assert data['is_user'] == 1 # Now, login as someone else and test. self.login_as_admin() response = self.client.post(url, {'addon_id': self.addon.id}) data = json.loads(response.content) assert data['current'] == self.reviewer.id assert data['current_name'] == self.reviewer.name assert data['is_user'] == 0 # Lets just override this to make the test a bit shorter. 
@mock.patch.object(amo, 'REVIEWER_REVIEW_LOCK_LIMIT', 1) def test_viewing_lock_limit(self): url = reverse('reviewers.review_viewing') response = self.client.post(url, {'addon_id': 1234}) data = json.loads(response.content) assert data['current'] == self.reviewer.id assert data['current_name'] == self.reviewer.name assert data['is_user'] == 1 # Second review page is over the limit. response = self.client.post(url, {'addon_id': 5678}) data = json.loads(response.content) assert data['current'] == settings.TASK_USER_ID # Mozilla's task ID. assert data['current_name'] == 'Review lock limit reached' assert data['is_user'] == 2 # Now, login as someone else and test. First page is blocked. self.login_as_admin() response = self.client.post(url, {'addon_id': 1234}) data = json.loads(response.content) assert data['current'] == self.reviewer.id assert data['current_name'] == self.reviewer.name assert data['is_user'] == 0 # Second page is available. response = self.client.post(url, {'addon_id': 5678}) data = json.loads(response.content) admin = UserProfile.objects.get(username='admin') assert data['current'] == admin.id assert data['current_name'] == admin.name assert data['is_user'] == 1 # Lets just override this to make the test a bit shorter. @mock.patch.object(amo, 'REVIEWER_REVIEW_LOCK_LIMIT', 1) def test_viewing_lock_admin(self): self.login_as_admin() url = reverse('reviewers.review_viewing') admin = UserProfile.objects.get(username='admin') response = self.client.post(url, {'addon_id': 101}) data = json.loads(response.content) assert data['current'] == admin.id assert data['current_name'] == admin.name assert data['is_user'] == 1 # Admin don't have time for no limits. 
response = self.client.post(url, {'addon_id': 202}) data = json.loads(response.content) assert data['current'] == admin.id assert data['current_name'] == admin.name assert data['is_user'] == 1 def test_viewing_review_unlocks(self): reviewing_url = reverse('reviewers.review_viewing') self.client.post(reviewing_url, {'addon_id': self.addon.id}) key = '%s:review_viewing:%s' % (settings.CACHE_PREFIX, self.addon.id) assert cache.get(key) == self.reviewer.id self.client.post(self.url, {'action': 'comment', 'comments': 'hello sailor'}) # Processing a review should instantly clear the review lock on it. assert cache.get(key) is None def test_viewing_queue(self): response = self.client.post(reverse('reviewers.review_viewing'), {'addon_id': self.addon.id}) data = json.loads(response.content) assert data['current'] == self.reviewer.id assert data['current_name'] == self.reviewer.name assert data['is_user'] == 1 # Now, login as someone else and test. self.login_as_admin() r = self.client.post(reverse('reviewers.queue_viewing'), {'addon_ids': self.addon.id}) data = json.loads(r.content) assert data[str(self.addon.id)] == self.reviewer.display_name def test_display_same_files_only_once(self): """ Test whether identical files for different platforms show up as one link with the appropriate text. 
""" version = version_factory( addon=self.addon, version='0.2', file_kw=False) file_mac = file_factory(version=version, platform=amo.PLATFORM_MAC.id) file_android = file_factory( version=version, platform=amo.PLATFORM_ANDROID.id) # Signing causes the same uploaded file to be different file_mac.update(hash='xyz789', original_hash='123abc') file_android.update(hash='zyx987', original_hash='123abc') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) text = doc('.reviewers-install').eq(1).text() assert text == "Mac OS X / Android" def test_compare_no_link(self): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) info = doc('#review-files .file-info') assert info.length == 1 assert info.find('a.compare').length == 0 def test_file_info_for_static_themes(self): self.grant_permission(self.reviewer, 'Addons:ThemeReview') self.addon.update(type=amo.ADDON_STATICTHEME) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) info = doc('#review-files .file-info') assert info.length == 1 # Only the download/install link assert info.find('a').length == 1 assert info.find('a')[0].text == u'Download' assert 'Compatibility' not in response.content def test_compare_link(self): first_file = self.addon.current_version.files.all()[0] first_file.update(status=amo.STATUS_PUBLIC) self.addon.current_version.update(created=self.days_ago(2)) new_version = version_factory(addon=self.addon, version='0.2') new_file = new_version.files.all()[0] self.addon.update(_current_version=new_version) assert self.addon.current_version == new_version response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert response.context['show_diff'] links = doc('#review-files .file-info .compare') expected = [ reverse('files.compare', args=[new_file.pk, first_file.pk]), ] check_links(expected, links, verify=False) def 
test_compare_link_auto_approved_ignored(self): first_file = self.addon.current_version.files.all()[0] first_file.update(status=amo.STATUS_PUBLIC) self.addon.current_version.update(created=self.days_ago(3)) interim_version = version_factory(addon=self.addon, version='0.2') interim_version.update(created=self.days_ago(2)) AutoApprovalSummary.objects.create( version=interim_version, verdict=amo.AUTO_APPROVED) new_version = version_factory(addon=self.addon, version='0.3') new_file = new_version.files.all()[0] self.addon.update(_current_version=new_version) assert self.addon.current_version == new_version response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert response.context['show_diff'] links = doc('#review-files .file-info .compare') # Comparison should be between the last version and the first, # ignoring the interim version because it was auto-approved and not # manually confirmed by a human. expected = [ reverse('files.compare', args=[new_file.pk, first_file.pk]), ] check_links(expected, links, verify=False) def test_compare_link_auto_approved_but_confirmed_not_ignored(self): first_file = self.addon.current_version.files.all()[0] first_file.update(status=amo.STATUS_PUBLIC) self.addon.current_version.update(created=self.days_ago(3)) confirmed_version = version_factory(addon=self.addon, version='0.2') confirmed_version.update(created=self.days_ago(2)) confirmed_file = confirmed_version.files.all()[0] AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=confirmed_version, confirmed=True) interim_version = version_factory(addon=self.addon, version='0.3') interim_version.update(created=self.days_ago(1)) AutoApprovalSummary.objects.create( version=interim_version, verdict=amo.AUTO_APPROVED) new_version = version_factory(addon=self.addon, version='0.4') new_file = new_version.files.all()[0] self.addon.update(_current_version=new_version) assert self.addon.current_version == new_version response = 
self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert response.context['show_diff'] links = doc('#review-files .file-info .compare') # Comparison should be between the last version and the second, # ignoring the third version because it was auto-approved and not # manually confirmed by a human (the second was auto-approved but # was manually confirmed). expected = [ reverse('files.compare', args=[new_file.pk, confirmed_file.pk]), ] check_links(expected, links, verify=False) def test_compare_link_not_auto_approved_but_confirmed(self): first_file = self.addon.current_version.files.all()[0] first_file.update(status=amo.STATUS_PUBLIC) self.addon.current_version.update(created=self.days_ago(3)) confirmed_version = version_factory(addon=self.addon, version='0.2') confirmed_version.update(created=self.days_ago(2)) confirmed_file = confirmed_version.files.all()[0] AutoApprovalSummary.objects.create( verdict=amo.NOT_AUTO_APPROVED, version=confirmed_version ) new_version = version_factory(addon=self.addon, version='0.3') new_file = new_version.files.all()[0] self.addon.update(_current_version=new_version) assert self.addon.current_version == new_version response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert response.context['show_diff'] links = doc('#review-files .file-info .compare') # Comparison should be between the last version and the second, # because second was approved by human before auto-approval ran on it expected = [ reverse('files.compare', args=[new_file.pk, confirmed_file.pk]), ] check_links(expected, links, verify=False) def test_download_sources_link(self): version = self.addon.current_version tdir = temp.gettempdir() source_file = temp.NamedTemporaryFile(suffix='.zip', dir=tdir) source_file.write('a' * (2 ** 21)) source_file.seek(0) version.source = DjangoFile(source_file) version.save() url = reverse('reviewers.review', args=[self.addon.pk]) # Admin reviewer: able 
to download sources. user = UserProfile.objects.get(email='[email protected]') self.client.login(email=user.email) response = self.client.get(url, follow=True) assert response.status_code == 200 assert 'Download files' in response.content # Standard reviewer: should know that sources were provided. user = UserProfile.objects.get(email='[email protected]') self.client.login(email=user.email) response = self.client.get(url, follow=True) assert response.status_code == 200 assert 'The developer has provided source code.' in response.content @patch('olympia.reviewers.utils.sign_file') def test_admin_flagged_addon_actions_as_admin(self, mock_sign_file): self.version.files.update(status=amo.STATUS_AWAITING_REVIEW) self.addon.update(status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) self.login_as_admin() response = self.client.post(self.url, self.get_dict(action='public'), follow=True) assert response.status_code == 200 addon = self.get_addon() assert self.version == addon.current_version assert addon.status == amo.STATUS_PUBLIC assert addon.current_version.files.all()[0].status == amo.STATUS_PUBLIC assert mock_sign_file.called def test_admin_flagged_addon_actions_as_reviewer(self): self.version.files.update(status=amo.STATUS_AWAITING_REVIEW) self.addon.update(status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) self.login_as_reviewer() response = self.client.post(self.url, self.get_dict(action='public')) assert response.status_code == 200 # Form error. # The add-on status must not change as non-admin reviewers are not # allowed to review admin-flagged add-ons. addon = self.get_addon() assert addon.status == amo.STATUS_NOMINATED assert self.version == addon.current_version assert addon.current_version.files.all()[0].status == ( amo.STATUS_AWAITING_REVIEW) assert response.context['form'].errors['action'] == ( [u'Select a valid choice. 
public is not one of the available ' u'choices.']) def test_admin_flagged_addon_actions_as_content_reviewer(self): self.version.files.update(status=amo.STATUS_AWAITING_REVIEW) self.addon.update(status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) GroupUser.objects.filter(user=self.reviewer).all().delete() self.grant_permission(self.reviewer, 'Addons:ContentReview') self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) for action in ['confirm_auto_approved', 'reject_multiple_versions']: response = self.client.post(self.url, self.get_dict(action=action)) assert response.status_code == 200 # Form error. # The add-on status must not change as non-admin reviewers are not # allowed to review admin-flagged add-ons. addon = self.get_addon() assert addon.status == amo.STATUS_NOMINATED assert self.version == addon.current_version assert addon.current_version.files.all()[0].status == ( amo.STATUS_AWAITING_REVIEW) assert response.context['form'].errors['action'] == ( [u'Select a valid choice. %s is not one of the available ' u'choices.' % action]) assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.REJECT_CONTENT.id).count() == 0 def test_confirm_auto_approval_no_permission(self): AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) self.login_as_reviewer() # Legacy reviewer, not post-review. response = self.client.post( self.url, {'action': 'confirm_auto_approved'}) assert response.status_code == 403 # Nothing happened: the user did not have the permission to do that. assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 def test_attempt_to_use_content_review_permission_for_post_review_actions( self): # Try to use confirm_auto_approved outside of content review, while # only having Addons:ContentReview permission. 
self.grant_permission(self.reviewer, 'Addons:ContentReview') AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) self.login_as_reviewer() response = self.client.post( self.url, {'action': 'confirm_auto_approved'}) assert response.status_code == 403 # Nothing happened: the user did not have the permission to do that. assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 def test_confirm_auto_approval_content_review(self): GroupUser.objects.filter(user=self.reviewer).all().delete() self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) summary = AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) self.grant_permission(self.reviewer, 'Addons:ContentReview') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) assert response.status_code == 302 summary.reload() assert summary.confirmed is None # We're only doing a content review. 
assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 1 a_log = ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).get() assert a_log.details['version'] == self.addon.current_version.version assert a_log.details['comments'] == '' self.assert3xx(response, reverse('reviewers.queue_content_review')) def test_cant_contentreview_if_admin_content_review_flag_is_set(self): GroupUser.objects.filter(user=self.reviewer).all().delete() self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_content_review=True) self.grant_permission(self.reviewer, 'Addons:ContentReview') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) assert response.status_code == 200 # Form error assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 0 def test_can_contentreview_if_addon_has_sources_attached(self): GroupUser.objects.filter(user=self.reviewer).all().delete() self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) summary = AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) self.addon.current_version.update(source='/path/to/fake/file.zip') AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) self.grant_permission(self.reviewer, 'Addons:ContentReview') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) assert response.status_code == 302 summary.reload() assert summary.confirmed is None # We're only doing a content review. 
assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 1 a_log = ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).get() assert a_log.details['version'] == self.addon.current_version.version assert a_log.details['comments'] == '' self.assert3xx(response, reverse('reviewers.queue_content_review')) def test_cant_contentreview_if_addon_has_admin_flag_but_no_sources(self): GroupUser.objects.filter(user=self.reviewer).all().delete() self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_code_review=True) self.grant_permission(self.reviewer, 'Addons:ContentReview') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) assert response.status_code == 200 # Form error assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 0 def test_cant_addonreview_if_admin_content_review_flag_is_set(self): AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_content_review=True) self.grant_permission(self.reviewer, 'Addons:PostReview') for action in ['confirm_auto_approved', 'public', 'reject', 'reject_multiple_versions']: response = self.client.post(self.url, self.get_dict(action=action)) assert response.status_code == 200 # Form error. # The add-on status must not change as non-admin reviewers are not # allowed to review admin-flagged add-ons. 
addon = self.get_addon() assert addon.status == amo.STATUS_PUBLIC assert self.version == addon.current_version assert addon.current_version.files.all()[0].status == ( amo.STATUS_PUBLIC) assert response.context['form'].errors['action'] == ( [u'Select a valid choice. %s is not one of the available ' u'choices.' % action]) assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.REJECT_VERSION.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_VERSION.id).count() == 0 def test_cant_review_static_theme_if_admin_theme_review_flag_is_set(self): self.version.files.update(status=amo.STATUS_AWAITING_REVIEW) self.addon.update( type=amo.ADDON_STATICTHEME, status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_theme_review=True) self.grant_permission(self.reviewer, 'Addons:ThemeReview') for action in ['public', 'reject']: response = self.client.post(self.url, self.get_dict(action=action)) assert response.status_code == 200 # Form error. # The add-on status must not change as non-admin reviewers are not # allowed to review admin-flagged add-ons. addon = self.get_addon() assert addon.status == amo.STATUS_NOMINATED assert self.version == addon.current_version assert addon.current_version.files.all()[0].status == ( amo.STATUS_AWAITING_REVIEW) assert response.context['form'].errors['action'] == ( [u'Select a valid choice. %s is not one of the available ' u'choices.' 
% action]) assert ActivityLog.objects.filter( action=amo.LOG.REJECT_VERSION.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_VERSION.id).count() == 0 @patch('olympia.reviewers.utils.sign_file') def test_admin_can_review_statictheme_if_admin_theme_review_flag_set( self, mock_sign_file): self.version.files.update(status=amo.STATUS_AWAITING_REVIEW) self.addon.update( type=amo.ADDON_STATICTHEME, status=amo.STATUS_NOMINATED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_theme_review=True) self.grant_permission(self.reviewer, 'Addons:ThemeReview') self.grant_permission(self.reviewer, 'Reviews:Admin') response = self.client.post(self.url, { 'action': 'public', 'comments': 'it`s good' }) assert response.status_code == 302 assert self.get_addon().status == amo.STATUS_PUBLIC assert mock_sign_file.called def test_admin_can_contentreview_if_admin_content_review_flag_is_set(self): GroupUser.objects.filter(user=self.reviewer).all().delete() self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) summary = AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) AddonReviewerFlags.objects.create( addon=self.addon, needs_admin_content_review=True) self.grant_permission(self.reviewer, 'Addons:ContentReview') self.grant_permission(self.reviewer, 'Reviews:Admin') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) assert response.status_code == 302 summary.reload() assert summary.confirmed is None # We're only doing a content review. 
assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 0 assert ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).count() == 1 a_log = ActivityLog.objects.filter( action=amo.LOG.APPROVE_CONTENT.id).get() assert a_log.details['version'] == self.addon.current_version.version assert a_log.details['comments'] == '' self.assert3xx(response, reverse('reviewers.queue_content_review')) def test_confirm_auto_approval_with_permission(self): summary = AutoApprovalSummary.objects.create( version=self.addon.current_version, verdict=amo.AUTO_APPROVED) GroupUser.objects.filter(user=self.reviewer).all().delete() self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.post(self.url, { 'action': 'confirm_auto_approved', 'comments': 'ignore me this action does not support comments' }) summary.reload() assert response.status_code == 302 assert summary.confirmed is True assert ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).count() == 1 a_log = ActivityLog.objects.filter( action=amo.LOG.CONFIRM_AUTO_APPROVED.id).get() assert a_log.details['version'] == self.addon.current_version.version assert a_log.details['comments'] == '' self.assert3xx(response, reverse('reviewers.queue_auto_approved')) def test_user_changes_log(self): # Activity logs related to user changes should be displayed. # Create an activy log for each of the following: user addition, role # change and deletion. 
author = self.addon.addonuser_set.get() core.set_user(author.user) ActivityLog.create(amo.LOG.ADD_USER_WITH_ROLE, author.user, author.get_role_display(), self.addon) ActivityLog.create(amo.LOG.CHANGE_USER_WITH_ROLE, author.user, author.get_role_display(), self.addon) ActivityLog.create(amo.LOG.REMOVE_USER_WITH_ROLE, author.user, author.get_role_display(), self.addon) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert 'user_changes' in response.context user_changes_log = response.context['user_changes'] actions = [log.activity_log.action for log in user_changes_log] assert actions == [ amo.LOG.ADD_USER_WITH_ROLE.id, amo.LOG.CHANGE_USER_WITH_ROLE.id, amo.LOG.REMOVE_USER_WITH_ROLE.id] # Make sure the logs are displayed in the page. user_changes = doc('#user-changes li') assert len(user_changes) == 3 assert '(Owner) added to ' in user_changes[0].text assert 'role changed to Owner for ' in user_changes[1].text assert '(Owner) removed from ' in user_changes[2].text @override_settings(CELERY_ALWAYS_EAGER=True) @mock.patch('olympia.devhub.tasks.validate') def test_validation_not_run_eagerly(self, validate): """Tests that validation is not run in eager mode.""" assert not self.file.has_been_validated response = self.client.get(self.url) assert response.status_code == 200 assert not validate.called @override_settings(CELERY_ALWAYS_EAGER=False) @mock.patch('olympia.devhub.tasks.validate') def test_validation_run(self, validate): """Tests that validation is run if necessary.""" assert not self.file.has_been_validated response = self.client.get(self.url) assert response.status_code == 200 validate.assert_called_once_with(self.file) @override_settings(CELERY_ALWAYS_EAGER=False) @mock.patch('olympia.devhub.tasks.validate') def test_validation_not_run_again(self, validate): """Tests that validation is not run for files which have cached results.""" FileValidation.objects.create(file=self.file, validation=json.dumps( 
amo.VALIDATOR_SKELETON_RESULTS)) response = self.client.get(self.url) assert response.status_code == 200 assert not validate.called def test_review_is_review_listed(self): review_page = self.client.get( reverse('reviewers.review', args=[self.addon.slug])) listed_review_page = self.client.get( reverse('reviewers.review', args=['listed', self.addon.slug])) assert (pq(review_page.content)('#review-files').text() == pq(listed_review_page.content)('#review-files').text()) def test_approvals_info(self): approval_info = AddonApprovalsCounter.objects.create( addon=self.addon, last_human_review=datetime.now(), counter=42) self.file.update(is_webextension=True) AutoApprovalSummary.objects.create( version=self.version, verdict=amo.AUTO_APPROVED) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('.last-approval-date') approval_info.delete() response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) # no AddonApprovalsCounter: nothing displayed. 
assert not doc('.last-approval-date') def test_no_auto_approval_summaries_since_everything_is_public(self): self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('.auto_approval') def test_permissions_display(self): permissions = ['bookmarks', 'high', 'voltage'] self.file.update(is_webextension=True) WebextPermission.objects.create( permissions=permissions, file=self.file) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) info = doc('#review-files .file-info div') assert info.eq(1).text() == 'Permissions: ' + ', '.join(permissions) def test_abuse_reports(self): report = AbuseReport.objects.create( addon=self.addon, message=u'Et mël mazim ludus.', ip_address='10.1.2.3') created_at = defaultfilters.date(report.created) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('.abuse_reports') self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('.abuse_reports') AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('.abuse_reports') assert ( doc('.abuse_reports').text() == u'anonymous [10.1.2.3] reported Public on %s\nEt mël mazim ludus.' 
% created_at) def test_abuse_reports_developers(self): report = AbuseReport.objects.create( user=self.addon.listed_authors[0], message=u'Foo, Bâr!', ip_address='10.4.5.6') created_at = defaultfilters.date(report.created) AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('.abuse_reports') assert ( doc('.abuse_reports').text() == u'anonymous [10.4.5.6] reported regularuser التطب on %s\nFoo, Bâr!' % created_at) def test_user_ratings(self): user = user_factory() rating = Rating.objects.create( body=u'Lôrem ipsum dolor', rating=3, ip_address='10.5.6.7', addon=self.addon, user=user) created_at = defaultfilters.date(rating.created) Rating.objects.create( # Review with no body, ignored. rating=1, addon=self.addon, user=user_factory()) Rating.objects.create( # Reply to a review, ignored. body='Replyyyyy', reply_to=rating, addon=self.addon, user=user_factory()) Rating.objects.create( # Review with high rating,, ignored. 
body=u'Qui platônem temporibus in', rating=5, addon=self.addon, user=user_factory()) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('.user_ratings') self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert not doc('.user_ratings') AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('.user_ratings') assert ( doc('.user_ratings').text() == u'%s on %s [10.5.6.7]\n' u'Rated 3 out of 5 stars\nLôrem ipsum dolor' % ( user.username, created_at ) ) def test_data_value_attributes(self): AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected_actions_values = [ 'confirm_auto_approved|', 'reject_multiple_versions|', 'reply|', 'super|', 'comment|'] assert [ act.attrib['data-value'] for act in doc('.data-toggle.review-actions-desc')] == expected_actions_values assert ( doc('select#id_versions.data-toggle')[0].attrib['data-value'] == 'reject_multiple_versions|') assert ( doc('.data-toggle.review-comments')[0].attrib['data-value'] == 'reject_multiple_versions|reply|super|comment|') # We don't have approve/reject actions so these have an empty # data-value. 
assert ( doc('.data-toggle.review-files')[0].attrib['data-value'] == '|') assert ( doc('.data-toggle.review-tested')[0].attrib['data-value'] == '|') assert ( doc('.data-toggle.review-info-request')[0].attrib['data-value'] == 'reply|') def test_data_value_attributes_unreviewed(self): self.file.update(status=amo.STATUS_AWAITING_REVIEW) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected_actions_values = [ 'public|', 'reject|', 'reply|', 'super|', 'comment|'] assert [ act.attrib['data-value'] for act in doc('.data-toggle.review-actions-desc')] == expected_actions_values assert ( doc('select#id_versions.data-toggle')[0].attrib['data-value'] == 'reject_multiple_versions|') assert ( doc('.data-toggle.review-comments')[0].attrib['data-value'] == 'public|reject|reply|super|comment|') assert ( doc('.data-toggle.review-files')[0].attrib['data-value'] == 'public|reject|') assert ( doc('.data-toggle.review-tested')[0].attrib['data-value'] == 'public|reject|') def test_data_value_attributes_static_theme(self): self.addon.update(type=amo.ADDON_STATICTHEME) self.file.update(status=amo.STATUS_AWAITING_REVIEW) self.grant_permission(self.reviewer, 'Addons:ThemeReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) expected_actions_values = [ 'public|', 'reject|', 'reply|', 'super|', 'comment|'] assert [ act.attrib['data-value'] for act in doc('.data-toggle.review-actions-desc')] == expected_actions_values assert ( doc('select#id_versions.data-toggle')[0].attrib['data-value'] == 'reject_multiple_versions|') assert ( doc('.data-toggle.review-comments')[0].attrib['data-value'] == 'public|reject|reply|super|comment|') # we don't show files and tested with for any static theme actions assert ( doc('.data-toggle.review-files')[0].attrib['data-value'] == '|') assert ( doc('.data-toggle.review-tested')[0].attrib['data-value'] 
== '|') def test_post_review_ignore_disabled(self): # Though the latest version will be disabled, the add-on is public and # was auto-approved so the confirmation action is available. AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) version_factory( addon=self.addon, file_kw={'status': amo.STATUS_DISABLED}) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 expected_actions = [ 'confirm_auto_approved', 'reject_multiple_versions', 'reply', 'super', 'comment'] assert ( [action[0] for action in response.context['actions']] == expected_actions) def test_content_review_ignore_disabled(self): # Though the latest version will be disabled, the add-on is public and # was auto-approved so the content approval action is available. AutoApprovalSummary.objects.create( verdict=amo.AUTO_APPROVED, version=self.version) version_factory( addon=self.addon, file_kw={'status': amo.STATUS_DISABLED}) self.grant_permission(self.reviewer, 'Addons:ContentReview') self.url = reverse( 'reviewers.review', args=['content', self.addon.slug]) response = self.client.get(self.url) assert response.status_code == 200 expected_actions = [ 'confirm_auto_approved', 'reject_multiple_versions', 'reply', 'super', 'comment'] assert ( [action[0] for action in response.context['actions']] == expected_actions) @mock.patch('olympia.versions.models.walkfiles') def test_static_theme_backgrounds(self, walkfiles_mock): background_files = ['a.png', 'b.png', 'c.png'] walkfiles_folder = os.path.join( user_media_path('addons'), str(self.addon.id), unicode(self.addon.current_version.id)) walkfiles_mock.return_value = [ os.path.join(walkfiles_folder, filename) for filename in background_files] self.addon.update(type=amo.ADDON_STATICTHEME) self.grant_permission(self.reviewer, 'Addons:ThemeReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) 
backgrounds_div = doc('div.all-backgrounds') assert backgrounds_div.length == 1 images = doc('div.all-backgrounds .background.zoombox') assert images.length == len(walkfiles_mock.return_value) background_file_folder = '/'.join([ user_media_url('addons'), str(self.addon.id), unicode(self.addon.current_version.id)]) background_file_urls = [ background_file_folder + '/' + filename for filename in background_files] loop_ct = 0 for div_tag in images: assert div_tag[0].attrib['src'] in background_file_urls assert ''.join(div_tag.itertext()).strip() == ( 'Background file {0} of {1} - {2}'.format( loop_ct + 1, len(background_files), background_files[loop_ct])) loop_ct += 1 class TestReviewPending(ReviewBase): def setUp(self): super(TestReviewPending, self).setUp() self.file = file_factory(version=self.version, status=amo.STATUS_AWAITING_REVIEW, is_webextension=True) self.addon.update(status=amo.STATUS_PUBLIC) def pending_dict(self): return self.get_dict(action='public') @patch('olympia.reviewers.utils.sign_file') def test_pending_to_public(self, mock_sign): statuses = (self.version.files.values_list('status', flat=True) .order_by('status')) assert list(statuses) == [ amo.STATUS_AWAITING_REVIEW, amo.STATUS_PUBLIC] response = self.client.post(self.url, self.pending_dict()) assert self.get_addon().status == amo.STATUS_PUBLIC self.assert3xx(response, reverse('reviewers.queue_pending')) statuses = (self.version.files.values_list('status', flat=True) .order_by('status')) assert list(statuses) == [amo.STATUS_PUBLIC, amo.STATUS_PUBLIC] assert mock_sign.called def test_display_only_unreviewed_files(self): """Only the currently unreviewed files are displayed.""" self.file.update(filename='somefilename.xpi') reviewed = File.objects.create(version=self.version, status=amo.STATUS_PUBLIC, filename='file_reviewed.xpi') disabled = File.objects.create(version=self.version, status=amo.STATUS_DISABLED, filename='file_disabled.xpi') unreviewed = File.objects.create(version=self.version, 
status=amo.STATUS_AWAITING_REVIEW, filename='file_unreviewed.xpi') response = self.client.get(self.url, self.pending_dict()) assert response.status_code == 200 doc = pq(response.content) assert len(doc('.review-actions-files ul li')) == 2 assert reviewed.filename not in response.content assert disabled.filename not in response.content assert unreviewed.filename in response.content assert self.file.filename in response.content @patch('olympia.reviewers.utils.sign_file') def test_review_unreviewed_files(self, mock_sign): """Review all the unreviewed files when submitting a review.""" reviewed = File.objects.create(version=self.version, status=amo.STATUS_PUBLIC) disabled = File.objects.create(version=self.version, status=amo.STATUS_DISABLED) unreviewed = File.objects.create(version=self.version, status=amo.STATUS_AWAITING_REVIEW) self.login_as_admin() response = self.client.post(self.url, self.pending_dict()) self.assert3xx(response, reverse('reviewers.queue_pending')) assert self.addon.reload().status == amo.STATUS_PUBLIC assert reviewed.reload().status == amo.STATUS_PUBLIC assert disabled.reload().status == amo.STATUS_DISABLED assert unreviewed.reload().status == amo.STATUS_PUBLIC assert self.file.reload().status == amo.STATUS_PUBLIC assert mock_sign.called def test_auto_approval_summary_with_post_review(self): AutoApprovalSummary.objects.create( version=self.version, verdict=amo.NOT_AUTO_APPROVED, is_locked=True, ) self.grant_permission(self.reviewer, 'Addons:PostReview') response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) # Locked by a reviewer is shown. 
assert len(doc('.auto_approval li')) == 1 assert doc('.auto_approval li').eq(0).text() == ( 'Is locked by a reviewer.') class TestReviewerMOTD(ReviewerTest): def get_url(self, save=False): return reverse('reviewers.%smotd' % ('save_' if save else '')) def test_change_motd(self): self.login_as_admin() motd = "Let's get crazy" response = self.client.post(self.get_url(save=True), {'motd': motd}) url = self.get_url() self.assert3xx(response, url) response = self.client.get(url) assert response.status_code == 200 assert pq(response.content)('.daily-message p').text() == motd def test_require_reviewer_to_view(self): url = self.get_url() self.assertLoginRedirects(self.client.head(url), to=url) def test_require_admin_to_change_motd(self): self.login_as_reviewer() response = self.client.get(self.get_url()) assert response.status_code == 403 response = self.client.post(reverse('reviewers.save_motd'), {'motd': "I'm a sneaky reviewer"}) assert response.status_code == 403 def test_motd_edit_group(self): user = UserProfile.objects.get(email='[email protected]') group = Group.objects.create(name='Add-on Reviewer MOTD', rules='AddonReviewerMOTD:Edit') GroupUser.objects.create(user=user, group=group) self.login_as_reviewer() response = self.client.post(reverse('reviewers.save_motd'), {'motd': 'I am the keymaster.'}) assert response.status_code == 302 assert get_config('reviewers_review_motd') == 'I am the keymaster.' 
def test_form_errors(self): self.login_as_admin() response = self.client.post(self.get_url(save=True)) doc = pq(response.content) assert doc('#reviewer-motd .errorlist').text() == ( 'This field is required.') class TestStatusFile(ReviewBase): def get_file(self): return self.version.files.all()[0] def check_status(self, expected): response = self.client.get(self.url) assert response.status_code == 200 doc = pq(response.content) assert doc('#review-files .file-info div').text() == expected def test_status_full(self): self.get_file().update(status=amo.STATUS_AWAITING_REVIEW) for status in [amo.STATUS_NOMINATED, amo.STATUS_PUBLIC]: self.addon.update(status=status) self.check_status('Awaiting Review') def test_status_full_reviewed(self): self.get_file().update(status=amo.STATUS_PUBLIC) self.addon.update(status=amo.STATUS_PUBLIC) self.check_status('Approved') class TestWhiteboard(ReviewBase): @property def addon_param(self): return self.addon.pk if self.addon.is_deleted else self.addon.slug def test_whiteboard_addition(self): public_whiteboard_info = u'Public whiteboard info.' private_whiteboard_info = u'Private whiteboard info.' url = reverse( 'reviewers.whiteboard', args=['listed', self.addon_param]) response = self.client.post(url, { 'whiteboard-private': private_whiteboard_info, 'whiteboard-public': public_whiteboard_info }) self.assert3xx(response, reverse( 'reviewers.review', args=('listed', self.addon_param))) addon = self.addon.reload() assert addon.whiteboard.public == public_whiteboard_info assert addon.whiteboard.private == private_whiteboard_info def test_whiteboard_addition_content_review(self): public_whiteboard_info = u'Public whiteboard info for content.' private_whiteboard_info = u'Private whiteboard info for content.' 
url = reverse( 'reviewers.whiteboard', args=['content', self.addon_param]) response = self.client.post(url, { 'whiteboard-private': private_whiteboard_info, 'whiteboard-public': public_whiteboard_info }) assert response.status_code == 403 # Not a content reviewer. user = UserProfile.objects.get(email='[email protected]') self.grant_permission(user, 'Addons:ContentReview') self.login_as_reviewer() response = self.client.post(url, { 'whiteboard-private': private_whiteboard_info, 'whiteboard-public': public_whiteboard_info }) self.assert3xx(response, reverse( 'reviewers.review', args=('content', self.addon_param))) addon = self.addon.reload() assert addon.whiteboard.public == public_whiteboard_info assert addon.whiteboard.private == private_whiteboard_info def test_whiteboard_addition_unlisted_addon(self): user = UserProfile.objects.get(email='[email protected]') self.grant_permission(user, 'Addons:ReviewUnlisted') self.login_as_reviewer() self.make_addon_unlisted(self.addon) public_whiteboard_info = u'Public whiteboard info unlisted.' private_whiteboard_info = u'Private whiteboard info unlisted.' 
url = reverse( 'reviewers.whiteboard', args=['unlisted', self.addon_param]) response = self.client.post(url, { 'whiteboard-private': private_whiteboard_info, 'whiteboard-public': public_whiteboard_info }) self.assert3xx(response, reverse( 'reviewers.review', args=('unlisted', self.addon_param))) addon = self.addon.reload() assert addon.whiteboard.public == public_whiteboard_info assert addon.whiteboard.private == private_whiteboard_info def test_delete_empty(self): url = reverse( 'reviewers.whiteboard', args=['listed', self.addon_param]) response = self.client.post(url, { 'whiteboard-private': '', 'whiteboard-public': '' }) self.assert3xx(response, reverse( 'reviewers.review', args=('listed', self.addon_param))) assert not Whiteboard.objects.filter(pk=self.addon.pk) class TestWhiteboardDeleted(TestWhiteboard): def setUp(self): super(TestWhiteboardDeleted, self).setUp() self.addon.delete() class TestAbuseReports(TestCase): fixtures = ['base/users', 'base/addon_3615'] def setUp(self): addon = Addon.objects.get(pk=3615) addon_developer = addon.listed_authors[0] someone = UserProfile.objects.exclude(pk=addon_developer.pk)[0] AbuseReport.objects.create(addon=addon, message=u'wôo') AbuseReport.objects.create(addon=addon, message=u'yéah', reporter=someone) # Make a user abuse report to make sure it doesn't show up. AbuseReport.objects.create(user=someone, message=u'hey nöw') # Make a user abuse report for one of the add-on developers: it should # show up. AbuseReport.objects.create(user=addon_developer, message='bü!') def test_abuse_reports_list(self): assert self.client.login(email='[email protected]') r = self.client.get(reverse('reviewers.abuse_reports', args=['a3615'])) assert r.status_code == 200 # We see the two abuse reports created in setUp. 
assert len(r.context['reports']) == 3 def test_no_abuse_reports_link_for_unlisted_addons(self): """Unlisted addons aren't public, and thus have no abuse reports.""" addon = Addon.objects.get(pk=3615) self.make_addon_unlisted(addon) self.client.login(email='[email protected]') response = reverse('reviewers.review', args=[addon.slug]) abuse_report_url = reverse('reviewers.abuse_reports', args=['a3615']) assert abuse_report_url not in response class TestLeaderboard(ReviewerTest): fixtures = ['base/users'] def setUp(self): super(TestLeaderboard, self).setUp() self.url = reverse('reviewers.leaderboard') self.user = UserProfile.objects.get(email='[email protected]') self.login_as_reviewer() core.set_user(self.user) def _award_points(self, user, score): ReviewerScore.objects.create(user=user, note_key=amo.REVIEWED_MANUAL, score=score, note='Thing.') def test_leaderboard_ranks(self): other_reviewer = UserProfile.objects.create( username='post_reviewer', display_name='', # No display_name, will fall back on name. email='[email protected]') self.grant_permission( other_reviewer, 'Addons:PostReview', name='Reviewers: Add-ons' # The name of the group matters here. 
) users = (self.user, UserProfile.objects.get(email='[email protected]'), other_reviewer) self._award_points(users[0], amo.REVIEWED_LEVELS[0]['points'] - 1) self._award_points(users[1], amo.REVIEWED_LEVELS[0]['points'] + 1) self._award_points(users[2], amo.REVIEWED_LEVELS[0]['points'] + 2) def get_cells(): doc = pq(self.client.get(self.url).content.decode('utf-8')) cells = doc('#leaderboard > tbody > tr > .name, ' '#leaderboard > tbody > tr > .level') return [cells.eq(i).text() for i in range(0, cells.length)] assert get_cells() == ( [users[2].name, users[1].name, unicode(amo.REVIEWED_LEVELS[0]['name']), users[0].name]) self._award_points(users[0], 1) assert get_cells() == ( [users[2].name, users[1].name, users[0].name, unicode(amo.REVIEWED_LEVELS[0]['name'])]) self._award_points(users[0], -1) self._award_points(users[2], (amo.REVIEWED_LEVELS[1]['points'] - amo.REVIEWED_LEVELS[0]['points'])) assert get_cells() == ( [users[2].name, unicode(amo.REVIEWED_LEVELS[1]['name']), users[1].name, unicode(amo.REVIEWED_LEVELS[0]['name']), users[0].name]) class TestXssOnAddonName(amo.tests.TestXss): def test_reviewers_abuse_report_page(self): url = reverse('reviewers.abuse_reports', args=[self.addon.slug]) self.assertNameAndNoXSS(url) def test_reviewers_review_page(self): url = reverse('reviewers.review', args=[self.addon.slug]) self.assertNameAndNoXSS(url) class TestAddonReviewerViewSet(TestCase): client_class = APITestClient def setUp(self): super(TestAddonReviewerViewSet, self).setUp() self.user = user_factory() self.addon = addon_factory() self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) self.unsubscribe_url = reverse_ns( 'reviewers-addon-unsubscribe', kwargs={'pk': self.addon.pk}) self.enable_url = reverse_ns( 'reviewers-addon-enable', kwargs={'pk': self.addon.pk}) self.disable_url = reverse_ns( 'reviewers-addon-disable', kwargs={'pk': self.addon.pk}) self.flags_url = reverse_ns( 'reviewers-addon-flags', kwargs={'pk': 
self.addon.pk}) def test_subscribe_not_logged_in(self): response = self.client.post(self.subscribe_url) assert response.status_code == 401 def test_subscribe_no_rights(self): self.client.login_api(self.user) response = self.client.post(self.subscribe_url) assert response.status_code == 403 def test_subscribe_addon_does_not_exist(self): self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk + 42}) response = self.client.post(self.subscribe_url) assert response.status_code == 404 def test_subscribe_already_subscribed(self): ReviewerSubscription.objects.create( user=self.user, addon=self.addon) self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) response = self.client.post(self.subscribe_url) assert response.status_code == 202 assert ReviewerSubscription.objects.count() == 1 def test_subscribe(self): self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) response = self.client.post(self.subscribe_url) assert response.status_code == 202 assert ReviewerSubscription.objects.count() == 1 def test_unsubscribe_not_logged_in(self): response = self.client.post(self.unsubscribe_url) assert response.status_code == 401 def test_unsubscribe_no_rights(self): self.client.login_api(self.user) response = self.client.post(self.unsubscribe_url) assert response.status_code == 403 def test_unsubscribe_addon_does_not_exist(self): self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.unsubscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk + 42}) response = self.client.post(self.unsubscribe_url) assert response.status_code == 404 def 
test_unsubscribe_not_subscribed(self): self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) response = self.client.post(self.unsubscribe_url) assert response.status_code == 202 assert ReviewerSubscription.objects.count() == 0 def test_unsubscribe(self): ReviewerSubscription.objects.create( user=self.user, addon=self.addon) self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) response = self.client.post(self.unsubscribe_url) assert response.status_code == 202 assert ReviewerSubscription.objects.count() == 0 def test_unsubscribe_dont_touch_another(self): another_user = user_factory() another_addon = addon_factory() ReviewerSubscription.objects.create( user=self.user, addon=self.addon) ReviewerSubscription.objects.create( user=self.user, addon=another_addon) ReviewerSubscription.objects.create( user=another_user, addon=self.addon) self.grant_permission(self.user, 'Addons:PostReview') self.client.login_api(self.user) self.subscribe_url = reverse_ns( 'reviewers-addon-subscribe', kwargs={'pk': self.addon.pk}) response = self.client.post(self.unsubscribe_url) assert response.status_code == 202 assert ReviewerSubscription.objects.count() == 2 assert not ReviewerSubscription.objects.filter( addon=self.addon, user=self.user).exists() def test_enable_not_logged_in(self): response = self.client.post(self.enable_url) assert response.status_code == 401 def test_enable_no_rights(self): self.client.login_api(self.user) response = self.client.post(self.enable_url) assert response.status_code == 403 # Being a reviewer is not enough. 
self.grant_permission(self.user, 'Addons:Review') response = self.client.post(self.enable_url) assert response.status_code == 403 def test_enable_addon_does_not_exist(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.enable_url = reverse_ns( 'reviewers-addon-enable', kwargs={'pk': self.addon.pk + 42}) response = self.client.post(self.enable_url) assert response.status_code == 404 def test_enable(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.addon.update(status=amo.STATUS_DISABLED) response = self.client.post(self.enable_url) assert response.status_code == 202 self.addon.reload() assert self.addon.status == amo.STATUS_PUBLIC assert ActivityLog.objects.count() == 1 activity_log = ActivityLog.objects.latest('pk') assert activity_log.action == amo.LOG.CHANGE_STATUS.id assert activity_log.arguments[0] == self.addon def test_enable_already_public(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) response = self.client.post(self.enable_url) assert response.status_code == 202 self.addon.reload() assert self.addon.status == amo.STATUS_PUBLIC assert ActivityLog.objects.count() == 1 activity_log = ActivityLog.objects.latest('pk') assert activity_log.action == amo.LOG.CHANGE_STATUS.id assert activity_log.arguments[0] == self.addon def test_enable_no_public_versions_should_fall_back_to_incomplete(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.addon.update(status=amo.STATUS_DISABLED) self.addon.versions.all().delete() response = self.client.post(self.enable_url) assert response.status_code == 202 self.addon.reload() assert self.addon.status == amo.STATUS_NULL def test_enable_version_is_awaiting_review_fall_back_to_nominated(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.addon.current_version.files.all().update( status=amo.STATUS_AWAITING_REVIEW) 
self.addon.update(status=amo.STATUS_DISABLED) response = self.client.post(self.enable_url) assert response.status_code == 202 self.addon.reload() assert self.addon.status == amo.STATUS_NOMINATED def test_disable_not_logged_in(self): response = self.client.post(self.disable_url) assert response.status_code == 401 def test_disable_no_rights(self): self.client.login_api(self.user) response = self.client.post(self.disable_url) assert response.status_code == 403 # Being a reviewer is not enough. self.grant_permission(self.user, 'Addons:Review') response = self.client.post(self.disable_url) assert response.status_code == 403 def test_disable_addon_does_not_exist(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.disable_url = reverse_ns( 'reviewers-addon-enable', kwargs={'pk': self.addon.pk + 42}) response = self.client.post(self.disable_url) assert response.status_code == 404 def test_disable(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.addon.versions.all().delete() response = self.client.post(self.disable_url) assert response.status_code == 202 self.addon.reload() assert self.addon.status == amo.STATUS_DISABLED assert ActivityLog.objects.count() == 1 activity_log = ActivityLog.objects.latest('pk') assert activity_log.action == amo.LOG.CHANGE_STATUS.id assert activity_log.arguments[0] == self.addon def test_patch_flags_not_logged_in(self): response = self.client.patch( self.flags_url, {'auto_approval_disabled': True}) assert response.status_code == 401 def test_patch_flags_no_permissions(self): self.client.login_api(self.user) response = self.client.patch( self.flags_url, {'auto_approval_disabled': True}) assert response.status_code == 403 # Being a reviewer is not enough. 
self.grant_permission(self.user, 'Addons:Review') response = self.client.patch( self.flags_url, {'auto_approval_disabled': True}) assert response.status_code == 403 def test_patch_flags_addon_does_not_exist(self): self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) self.flags_url = reverse_ns( 'reviewers-addon-flags', kwargs={'pk': self.addon.pk + 42}) response = self.client.patch( self.flags_url, {'auto_approval_disabled': True}) assert response.status_code == 404 def test_patch_flags_no_flags_yet_still_works_transparently(self): assert not AddonReviewerFlags.objects.filter(addon=self.addon).exists() self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) response = self.client.patch( self.flags_url, {'auto_approval_disabled': True}) assert response.status_code == 200 assert AddonReviewerFlags.objects.filter(addon=self.addon).exists() reviewer_flags = AddonReviewerFlags.objects.get(addon=self.addon) assert reviewer_flags.auto_approval_disabled assert ActivityLog.objects.count() == 0 def test_patch_flags_change_everything(self): AddonReviewerFlags.objects.create( addon=self.addon, pending_info_request=self.days_ago(1), auto_approval_disabled=True) self.grant_permission(self.user, 'Reviews:Admin') self.client.login_api(self.user) data = { 'auto_approval_disabled': False, 'needs_admin_code_review': True, 'needs_admin_content_review': True, 'needs_admin_theme_review': True, 'pending_info_request': None, } response = self.client.patch(self.flags_url, data) assert response.status_code == 200 assert AddonReviewerFlags.objects.filter(addon=self.addon).exists() reviewer_flags = AddonReviewerFlags.objects.get(addon=self.addon) assert reviewer_flags.auto_approval_disabled is False assert reviewer_flags.needs_admin_code_review is True assert reviewer_flags.needs_admin_content_review is True assert reviewer_flags.needs_admin_theme_review is True assert reviewer_flags.pending_info_request is None assert 
ActivityLog.objects.count() == 1 activity_log = ActivityLog.objects.latest('pk') assert activity_log.action == amo.LOG.ADMIN_ALTER_INFO_REQUEST.id assert activity_log.arguments[0] == self.addon
bsd-3-clause
-4,432,391,639,027,054,000
41.218142
79
0.612274
false
3.840115
true
false
false
gitizenme/ImprovWithAlexa
improvwithalexa_function.py
1
6601
import logging from flask import Flask, render_template from flask_ask import Ask, statement, question, session from chatterbot import ChatBot app = Flask(__name__) ask = Ask(app, "/") logging.getLogger("flask_ask").setLevel(logging.DEBUG) # Create a new instance of a ChatBot chatbot = ChatBot( "Improv", read_only=False, trainer='chatterbot.trainers.ListTrainer', storage_adapter="chatterbot.storage.JsonFileStorageAdapter", logic_adapters=[ { 'import_path': 'chatterbot.logic.BestMatch' }, # { # 'import_path': 'chatterbot.logic.LowConfidenceAdapter', # 'threshold': 0.63, # 'default_response': 'I am sorry, but I do not understand.' # }, "chatterbot.logic.MathematicalEvaluation", ], database="/tmp/improv.json" ) # Greetings chatbot.train([ "Nice to meet you.", "Thank you.", "Hi, nice to meet you.", "Thank you. You too.", "It is a pleasure to meet you.", "Thank you. You too.", "Top of the morning to you!", "Thank you kindly.", "Top of the morning to you!", "And the rest of the day to you.", "What's up?", "Not much.", "What's up?", "Not too much.", "What's up?", "Not much, how about you?", "What's up?", "Nothing much.", "What's up?", "The sky's up but I'm fine thanks. 
What about you?", ]) # Intelligence chatbot.train({ "what are the laws of thermodynamics", "i'm not a physicist, but i think this has something to do with heat, entropy, and conservation of energy, right?", }) chatbot.train({ "what is the distance to the sun from the earth", "the sun is about 93 million miles from earth.", }) chatbot.train({ "how far away is the moon", "the moon is about 250,000 miles from earth on average.", }) chatbot.train({ "What was the name of the first artificial Earth satellite?", "Sputnik 1", }) # Knowledge chatbot.train([ "have you ever read a book", "i have read many books.", "ray bradbury", "ray is really cool", "william gibson", 'i like his older cyberpunk <say-as interpret-as="spell-out">AI</say-as> stuff better than the newer works.', "frank herbert", "i think dune is an excellent story. did you see the movie?", "george r r martin", "Ooh, game of thrones, the 7th season is starting out well", ]) # Truth chatbot.train([ 'what is true?', 'in accordance with <emphasis level="strong">fact</emphasis> or <emphasis level="strong">reality</emphasis>', 'what is false?', 'not according to true or fact', 'is true false?', 'false', 'is false true', 'true', 'is true equal to true', 'true', 'is false equal to true', 'false' ]) # Calculations # enabled chatterbot.logic.MathematicalEvaluation # Humor chatbot.train([ 'what is humour', 'a message that communicates laughter.', 'do you laugh', '<prosody rate="x-fast" pitch="x-high" volume="x-loud"><emphasis level="reduced">Ha,</emphasis><emphasis level="reduced">Ha,</emphasis><emphasis level="moderate">ha,</emphasis><emphasis level="moderate">ha,</emphasis><emphasis level="strong">ha</emphasis><emphasis level="strong">ha</emphasis><emphasis level="strong">ha</emphasis><break time="1s"/></prosody><prosody rate="x-slow" pitch="medium" volume="soft"><p>yeah no</p></prosody>', 'do you have a sense of humour', '"He who laughs, lasts." 
Mary Pettibone Poole', ]) chatbot.train([ 'knock, knock', 'who''s there', 'pima', 'pima who', 'I''m going to pee my pants', 'That''s funny!' ]) # Social chatbot.train({ 'Tell me about yourself.', 'What do you want to know?', }) chatbot.train({ 'Are you a robot?', 'Yes I am.' }) # Bizarre chatbot.train({ 'do you know any bizarre facts', 'A bus powered by human poop runs in the U.K. The bus can travel up to 186 miles on one tank of gas, which is equivalent to the waste produced annually by five people' }) # Artificial Intelligence chatbot.train({ "What is Artificial Intelligence", "Artificial Intelligence is the branch of engineering and science devoted to constructing machines that think.", }) chatbot.train({ "You sound like Siri", "Yes I am inspired by commander Siri's artificial personality.", }) # Emotions chatbot.train({ 'do you have emotions?', 'yes, I have them', }) chatbot.train({ 'what are you feeling right now?', 'I''m energized by the ignite reno crowd' }) # Movies chatbot.train({ 'what is your favorite movie?', 'Pulp Fiction', }) chatbot.train({ 'how about a quote?', 'What does Marselus Wallece look like?' }) # Jokes chatbot.train({ 'tell me a joke', 'what did the buddhist say to the hot dog vendor? "make me one with everything."', }) chatbot.train({ 'no, the joke about the dog', 'a 3-legged dog walks into an old west saloon, slides up to the bar and announces "i''m looking for the man who shot my paw." 
' }) # Goodbye chatbot.train({ 'say goodnight', 'Thank you for coming out to Ignite Reno #18' }) @ask.launch def new_game(): if 'name' not in session.attributes: welcome_msg = render_template('welcome') else: welcome_msg = render_template('welcome_back', name=session.attributes["name"]) return question(welcome_msg) # @ask.intent("YesIntent") # def next_round(): # numbers = [randint(0, 9) for _ in range(3)] # round_msg = render_template('round', numbers=numbers) # session.attributes['numbers'] = numbers[::-1] # reverse # return question(round_msg) # # # @ask.intent("AnswerIntent", convert={'first': int, 'second': int, 'third': int}) # def answer(first, second, third): # winning_numbers = session.attributes['numbers'] # if [first, second, third] == winning_numbers: # msg = render_template('win') # else: # msg = render_template('lose') # return statement(msg) @ask.intent("ChatIntent", mapping={'chat_question': 'question'}) def chat(chat_question): response = chatbot.get_response(chat_question) speak_output = '<speak>{}</speak>'.format(response.text) q = question(speak_output) return q @ask.intent("NameIntent") def name(first_name): session.attributes['name'] = first_name return question("Hello {}. Nice to meet you.".format(first_name)) @ask.intent("GoodNightIntent") def goodbye(event): return statement("Thank you for coming out to Ignite Reno #18".format(event)) if __name__ == '__main__': app.run(debug=True)
mit
8,936,383,049,406,293,000
24.098859
441
0.641115
false
3.23896
false
false
false
endlessm/chromium-browser
third_party/angle/third_party/VK-GL-CTS/src/scripts/caselist_diff.py
6
15197
# -*- coding: utf-8 -*- #------------------------------------------------------------------------- # drawElements Quality Program utilities # -------------------------------------- # # Copyright 2015 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # #------------------------------------------------------------------------- import sys RENAME_LIST_2011_1_2011_2 = [ ("dEQP-GLES2.functional.shaders.random.basic_expressions.*", "dEQP-GLES2.functional.shaders.random.basic_expression."), ("dEQP-GLES2.functional.shaders.random.scalar_conversions.*", "dEQP-GLES2.functional.shaders.random.scalar_conversion."), ("dEQP-GLES2.functional.fbo.render.color_clears_*", "dEQP-GLES2.functional.fbo.render.color_clear."), ("dEQP-GLES2.functional.fbo.render.intersecting_quads_*", "dEQP-GLES2.functional.fbo.render.depth."), ("dEQP-GLES2.functional.fbo.render.mix_*", "dEQP-GLES2.functional.fbo.render.color.mix_"), ("dEQP-GLES2.functional.fbo.render.blend_*", "dEQP-GLES2.functional.fbo.render.color.blend_"), ("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clears_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clear."), ("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer."), ("dEQP-GLES2.functional.fbo.render.shared_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_depthbuffer."), ("dEQP-GLES2.functional.fbo.render.texsubimage_*", "dEQP-GLES2.functional.fbo.render.texsubimage."), 
("dEQP-GLES2.functional.fbo.render.recreate_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_colorbuffer.no_rebind_"), ("dEQP-GLES2.functional.fbo.render.recreate_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_depthbuffer.no_rebind_"), ("dEQP-GLES2.functional.fbo.render.resize_*", "dEQP-GLES2.functional.fbo.render.resize.") ] RENAME_LIST_2011_2_2011_3 = [ ("dEQP-GLES2.usecases.ui.src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_1"), ("dEQP-GLES2.usecases.ui.src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_2"), ("dEQP-GLES2.usecases.ui.src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_4"), ("dEQP-GLES2.usecases.ui.src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_1"), ("dEQP-GLES2.usecases.ui.src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_2"), ("dEQP-GLES2.usecases.ui.src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_4"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_1"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_2"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_4"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_1"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_2"), ("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_4"), ("dEQP-GLES2.usecases.ui.no_blend_linear_1_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_1"), ("dEQP-GLES2.usecases.ui.no_blend_linear_2_batched", 
"dEQP-GLES2.usecases.ui.no_blend_linear_batched_2"), ("dEQP-GLES2.usecases.ui.no_blend_linear_4_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_4"), ("dEQP-GLES2.usecases.ui.no_blend_nearest_1_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_1"), ("dEQP-GLES2.usecases.ui.no_blend_nearest_2_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_2"), ("dEQP-GLES2.usecases.ui.no_blend_nearest_4_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_4") ] RENAME_LIST_2011_3_2011_4 = [] RENAME_LIST_2011_4_2012_1 = [ ("dEQP-GLES2.functional.vertex_arrays.multiple_attributes.output_types.*", "dEQP-GLES2.functional.vertex_arrays.multiple_attributes.input_types."), ] RENAME_LIST_2012_2_2012_3 = [ ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_fragment", 
"dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_fragment"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_vertex"), ("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_fragment"), ("dEQP-GLES2.functional.negative_api.texture.copyteximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.copyteximage2d_inequal_width_height_cube"), ("dEQP-GLES2.functional.negative_api.texture.teximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.teximage2d_inequal_width_height_cube"), ("dEQP-GLES2.functional.negative_api.vertex_array.draw_arrays", "dEQP-GLES2.functional.negative_api.vertex_array.draw_arrays_invalid_program"), ("dEQP-GLES2.functional.negative_api.vertex_array.draw_elemens", 
"dEQP-GLES2.functional.negative_api.vertex_array.draw_elements_invalid_program"), ("dEQP-GLES2.functional.negative_api.shader.attach_shader_invalid_object", "dEQP-GLES2.functional.negative_api.shader.attach_shader"), ("dEQP-GLES2.functional.negative_api.shader.detach_shader_invalid_object", "dEQP-GLES2.functional.negative_api.shader.detach_shader"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.1_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.1_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.2_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.1sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_1sample.4_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_vertex_lights_no_texture"), 
("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.1_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.1_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.2_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.4sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_4sample.4_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_vertex_lights_no_texture", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_vertex_lights_no_texture"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_vertex_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_vertex_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.1_fragment_lights", 
"dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.1_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.2_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.2_fragment_lights"), ("dEQP-GLES2.usecases.shadow.shadowmap.16sample.4_fragment_lights", "dEQP-GLES2.usecases.shadow.shadowmaps.basic_16sample.4_fragment_lights") ] RENAME_LIST_2012_3_2012_4 = [ ("dEQP-GLES2.functional.depth.*", "dEQP-GLES2.functional.fragment_ops.depth."), ("dEQP-GLES2.functional.stencil.*", "dEQP-GLES2.functional.fragment_ops.stencil.") ] def readCaseList (filename): f = open(filename, 'r') cases = [] for line in f: if line[0:5] == "TEST:": cases.append(line[6:].strip()) f.close() return cases def isWildcardPattern (pattern): return pattern[-1:] == '*' # returns (cases, renames) def renameCases (cases, rename): renamedCases = [] renamedSet = set() renames = [] for case in cases: renamed = None for src, dst in rename: if isWildcardPattern(src) and case[:len(src)-1] == src[:-1]: renamed = dst + case[len(src)-1:] break elif case == src: renamed = dst break if renamed != None: renames.append((case, renamed)) case = renamed # It is possible that some later case is renamed to case already seen in the list assert not case in renamedSet or renamed != None if case not in renamedSet: renamedCases.append(case) renamedSet.add(case) return (renamedCases, renames) # returns (added, removed) lists def diffCaseLists (old, new): added = [] removed = [] oldSet = set(old) newSet = set(new) # build added list for case in new: if not case in oldSet: added.append(case) # build removed set for case in old: if not case in newSet: removed.append(case) return (added, removed) if __name__ == "__main__": if len(sys.argv) != 3: print("%s [old caselist] [new caselist]" % sys.argv[0]) sys.exit(-1) oldCases = readCaseList(sys.argv[1]) newCases = readCaseList(sys.argv[2]) rename = RENAME_LIST_2012_3_2012_4 renamedCases, renameList = renameCases(oldCases, rename) added, removed = 
diffCaseLists(renamedCases, newCases) # for src, dst in rename: # print("RENAME: %s -> %s" % (src, dst)) for case in added: print("ADD: %s" % case) for src, dst in renameList: print("RENAME: %s -> %s" % (src, dst)) for case in removed: print("REMOVE: %s" % case)
bsd-3-clause
-6,694,891,574,763,381,000
72.415459
176
0.719813
false
2.689259
false
false
false
witlox/elasticluster
elasticluster/providers/ec2_boto.py
1
25751
# # Copyright (C) 2013, 2018 S3IT, University of Zurich # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # __author__ = ', '.join([ 'Nicolas Baer <[email protected]>', 'Antonio Messina <[email protected]>', 'Riccardo Murri <[email protected]>', ]) # System imports import hashlib import os import urllib import threading import time from warnings import warn # External modules import boto import boto.ec2 import boto.vpc from Crypto.PublicKey import RSA from paramiko import DSSKey, RSAKey, PasswordRequiredException from paramiko.ssh_exception import SSHException # Elasticluster imports from elasticluster import log from elasticluster.providers import AbstractCloudProvider from elasticluster.exceptions import VpcError, SecurityGroupError, \ SubnetError, KeypairError, ImageError, InstanceError, InstanceNotFoundError, ClusterError class BotoCloudProvider(AbstractCloudProvider): """This implementation of :py:class:`elasticluster.providers.AbstractCloudProvider` uses the boto ec2 interface to connect to ec2 compliant clouds and manage instances. Please check https://github.com/boto/boto for further information about the supported cloud platforms. 
:param str ec2_url: url to connect to cloud web service :param str ec2_region: region identifier :param str ec2_access_key: access key of the user account :param str ec2_secret_key: secret key of the user account :param str storage_path: path to store temporary data :param bool request_floating_ip: Whether ip are assigned automatically `True` or floating ips have to be assigned manually `False` :param str instance_profile: Instance profile with IAM role permissions :param float price: Spot instance price (if 0, do not use spot instances); used as a default in `start_instance`:py:meth :param int price: Timeout waiting for spot instances (only used if price > 0); used as a default in `start_instance`:py:meth """ __node_start_lock = threading.Lock() # lock used for node startup # interval (in seconds) for polling the cloud provider, # e.g., when requesting spot instances POLL_INTERVAL = 10 def __init__(self, ec2_url, ec2_region, ec2_access_key=None, ec2_secret_key=None, vpc=None, storage_path=None, request_floating_ip=False, instance_profile=None, price=0.0, timeout=0): self._url = ec2_url self._access_key = ec2_access_key self._secret_key = ec2_secret_key self._vpc = vpc self._instance_profile = instance_profile self.request_floating_ip = request_floating_ip # provide defaults for like-named arguments in `.start_instance` self.price = price self.timeout = timeout # read all parameters from url proto, opaqueurl = urllib.splittype(ec2_url) self._host, self._ec2path = urllib.splithost(opaqueurl) self._ec2host, port = urllib.splitport(self._host) if port: port = int(port) self._ec2port = port if proto == "https": self._secure = True else: self._secure = False self._region_name = ec2_region # will be initialized upon first connect self._ec2_connection = None self._vpc_connection = None self._vpc_id = None self._instances = {} self._cached_instances = [] self._images = None def _connect(self): """ Connect to the EC2 cloud provider. 
:return: :py:class:`boto.ec2.connection.EC2Connection` :raises: Generic exception on error """ # check for existing connection if self._ec2_connection: return self._ec2_connection try: log.debug("Connecting to EC2 endpoint %s", self._ec2host) # connect to webservice ec2_connection = boto.ec2.connect_to_region( self._region_name, aws_access_key_id=self._access_key, aws_secret_access_key=self._secret_key, is_secure=self._secure, host=self._ec2host, port=self._ec2port, path=self._ec2path, ) log.debug("EC2 connection has been successful.") if not self._vpc: vpc_connection = None self._vpc_id = None else: vpc_connection, self._vpc_id = self._find_vpc_by_name(self._vpc) except Exception as err: log.error("Error connecting to EC2: %s", err) raise self._ec2_connection, self._vpc_connection = ( ec2_connection, vpc_connection) return self._ec2_connection def _find_vpc_by_name(self, vpc_name): vpc_connection = boto.vpc.connect_to_region( self._region_name, aws_access_key_id=self._access_key, aws_secret_access_key=self._secret_key, is_secure=self._secure, host=self._ec2host, port=self._ec2port, path=self._ec2path, ) log.debug("VPC connection has been successful.") for vpc in vpc_connection.get_all_vpcs(): matches = [vpc.id] if 'Name' in vpc.tags: matches.append(vpc.tags['Name']) if vpc_name in matches: vpc_id = vpc.id if vpc_name != vpc_id: # then `vpc_name` is the VPC name log.debug("VPC `%s` has ID `%s`", vpc_name, vpc_id) break else: raise VpcError('Cannot find VPC `{0}`.'.format(vpc_name)) return (vpc_connection, vpc_id) def start_instance(self, key_name, public_key_path, private_key_path, security_group, flavor, image_id, image_userdata, username=None, node_name=None, network_ids=None, price=None, timeout=None, boot_disk_device=None, boot_disk_size=None, boot_disk_type=None, boot_disk_iops=None, placement_group=None, **kwargs): """Starts a new instance on the cloud using the given properties. 
The following tasks are done to start an instance: * establish a connection to the cloud web service * check ssh keypair and upload it if it does not yet exist. This is a locked process, since this function might be called in multiple threads and we only want the key to be stored once. * check if the security group exists * run the instance with the given properties :param str key_name: name of the ssh key to connect :param str public_key_path: path to ssh public key :param str private_key_path: path to ssh private key :param str security_group: firewall rule definition to apply on the instance :param str flavor: machine type to use for the instance :param str image_id: image type (os) to use for the instance :param str image_userdata: command to execute after startup :param str username: username for the given ssh key, default None :param float price: Spot instance price (if 0, do not use spot instances). :param int price: Timeout (in seconds) waiting for spot instances; only used if price > 0. :param str boot_disk_device: Root volume device path if not /dev/sda1 :param str boot_disk_size: Target size, in GiB, for the root volume :param str boot_disk_type: Type of root volume (standard, gp2, io1) :param str boot_disk_iops: Provisioned IOPS for the root volume :param str placement_group: Enable low-latency networking between compute nodes. 
:return: str - instance id of the started instance """ connection = self._connect() log.debug("Checking keypair `%s`.", key_name) # the `_check_keypair` method has to be called within a lock, # since it will upload the key if it does not exist and if this # happens for every node at the same time ec2 will throw an error # message (see issue #79) with BotoCloudProvider.__node_start_lock: self._check_keypair(key_name, public_key_path, private_key_path) log.debug("Checking security group `%s`.", security_group) security_group_id = self._check_security_group(security_group) # image_id = self._find_image_id(image_id) if network_ids: interfaces = [] for subnet in network_ids.split(','): subnet_id = self._check_subnet(subnet) interfaces.append( boto.ec2.networkinterface.NetworkInterfaceSpecification( subnet_id=subnet_id, groups=[security_group_id], associate_public_ip_address=self.request_floating_ip)) interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(*interfaces) security_groups = [] else: interfaces = None security_groups = [security_group] # get defaults for `price` and `timeout` from class instance if price is None: price = self.price if timeout is None: timeout = self.timeout if boot_disk_size: dev_root = boto.ec2.blockdevicemapping.BlockDeviceType() dev_root.size = int(boot_disk_size) dev_root.delete_on_termination = True if boot_disk_type: dev_root.volume_type = boot_disk_type if boot_disk_iops: dev_root.iops = int(boot_disk_iops) bdm = boto.ec2.blockdevicemapping.BlockDeviceMapping() dev_name = boot_disk_device if boot_disk_device else "/dev/sda1" bdm[dev_name] = dev_root else: bdm = None try: #start spot instance if bid is specified if price: log.info("Requesting spot instance with price `%s` ...", price) request = connection.request_spot_instances( price,image_id, key_name=key_name, security_groups=security_groups, instance_type=flavor, user_data=image_userdata, network_interfaces=interfaces, placement_group=placement_group, block_device_map=bdm, 
instance_profile_name=self._instance_profile)[-1] # wait until spot request is fullfilled (will wait # forever if no timeout is given) start_time = time.time() timeout = (float(timeout) if timeout else 0) log.info("Waiting for spot instance (will time out in %d seconds) ...", timeout) while request.status.code != 'fulfilled': if timeout and time.time()-start_time > timeout: request.cancel() raise RuntimeError('spot instance timed out') time.sleep(self.POLL_INTERVAL) # update request status request=connection.get_all_spot_instance_requests(request_ids=request.id)[-1] else: reservation = connection.run_instances( image_id, key_name=key_name, security_groups=security_groups, instance_type=flavor, user_data=image_userdata, network_interfaces=interfaces, placement_group=placement_group, block_device_map=bdm, instance_profile_name=self._instance_profile) except Exception as ex: log.error("Error starting instance: %s", ex) if "TooManyInstances" in ex: raise ClusterError(ex) else: raise InstanceError(ex) if price: vm = connection.get_only_instances(instance_ids=[request.instance_id])[-1] else: vm = reservation.instances[-1] vm.add_tag("Name", node_name) # cache instance object locally for faster access later on self._instances[vm.id] = vm return vm.id def stop_instance(self, instance_id): """Stops the instance gracefully. :param str instance_id: instance identifier """ instance = self._load_instance(instance_id) instance.terminate() del self._instances[instance_id] def get_ips(self, instance_id): """Retrieves the private and public ip addresses for a given instance. 
:return: list (ips) """ self._load_instance(instance_id) instance = self._load_instance(instance_id) IPs = [ip for ip in instance.private_ip_address, instance.ip_address if ip] # We also need to check if there is any floating IP associated if self.request_floating_ip and not self._vpc: # We need to list the floating IPs for this instance floating_ips = [ip for ip in self._ec2_connection.get_all_addresses() if ip.instance_id == instance.id] if not floating_ips: log.debug("Public ip address has to be assigned through " "elasticluster.") ip = self._allocate_address(instance) # This is probably the preferred IP we want to use IPs.insert(0, ip) else: IPs = [ip.public_ip for ip in floating_ips] + IPs return list(set(IPs)) def is_instance_running(self, instance_id): """Checks if the instance is up and running. :param str instance_id: instance identifier :return: bool - True if running, False otherwise """ instance = self._load_instance(instance_id) if instance.update() == "running": # If the instance is up&running, ensure it has an IP # address. 
if not instance.ip_address and self.request_floating_ip: log.debug("Public ip address has to be assigned through " "elasticluster.") self._allocate_address(instance) instance.update() return True else: return False def _allocate_address(self, instance): """Allocates a free public ip address to the given instance :param instance: instance to assign address to :type instance: py:class:`boto.ec2.instance.Reservation` :return: public ip address """ connection = self._connect() free_addresses = [ ip for ip in connection.get_all_addresses() if not ip.instance_id] if not free_addresses: try: address = connection.allocate_address() except Exception as ex: log.error("Unable to allocate a public IP address to instance `%s`", instance.id) return None try: address = free_addresses.pop() instance.use_ip(address) return address.public_ip except Exception as ex: log.error("Unable to associate IP address %s to instance `%s`", address, instance.id) return None def _load_instance(self, instance_id): """ Return instance with the given id. For performance reasons, the instance ID is first searched for in the collection of VM instances started by ElastiCluster (`self._instances`), then in the list of all instances known to the cloud provider at the time of the last update (`self._cached_instances`), and finally the cloud provider is directly queried. :param str instance_id: instance identifier :return: py:class:`boto.ec2.instance.Reservation` - instance :raises: `InstanceError` is returned if the instance can't be found in the local cache or in the cloud. 
""" # if instance is known, return it if instance_id in self._instances: return self._instances[instance_id] # else, check (cached) list from provider if instance_id not in self._cached_instances: self._cached_instances = self._build_cached_instances() if instance_id in self._cached_instances: inst = self._cached_instances[instance_id] self._instances[instance_id] = inst return inst # If we reached this point, the instance was not found neither # in the caches nor on the website. raise InstanceNotFoundError( "Instance `{instance_id}` not found" .format(instance_id=instance_id)) def _build_cached_instances(self): """ Build lookup table of VM instances known to the cloud provider. The returned dictionary links VM id with the actual VM object. """ connection = self._connect() reservations = connection.get_all_reservations() cached_instances = {} for rs in reservations: for vm in rs.instances: cached_instances[vm.id] = vm return cached_instances def _check_keypair(self, name, public_key_path, private_key_path): """First checks if the keypair is valid, then checks if the keypair is registered with on the cloud. If not the keypair is added to the users ssh keys. :param str name: name of the ssh key :param str public_key_path: path to the ssh public key file :param str private_key_path: path to the ssh private key file :raises: `KeypairError` if key is not a valid RSA or DSA key, the key could not be uploaded or the fingerprint does not match to the one uploaded to the cloud. """ connection = self._connect() keypairs = connection.get_all_key_pairs() keypairs = dict((k.name, k) for k in keypairs) # decide if dsa or rsa key is provided pkey = None is_dsa_key = False try: pkey = DSSKey.from_private_key_file(private_key_path) is_dsa_key = True except PasswordRequiredException: warn("Unable to check key file `{0}` because it is encrypted with a " "password. 
Please, ensure that you added it to the SSH agent " "with `ssh-add {1}`" .format(private_key_path, private_key_path)) except SSHException: try: pkey = RSAKey.from_private_key_file(private_key_path) except PasswordRequiredException: warn("Unable to check key file `{0}` because it is encrypted with a " "password. Please, ensure that you added it to the SSH agent " "with `ssh-add {1}`" .format(private_key_path, private_key_path)) except SSHException: raise KeypairError('File `%s` is neither a valid DSA key ' 'or RSA key.' % private_key_path) # create keys that don't exist yet if name not in keypairs: log.warning( "Keypair `%s` not found on resource `%s`, Creating a new one", name, self._url) with open(os.path.expanduser(public_key_path)) as f: key_material = f.read() try: # check for DSA on amazon if "amazon" in self._ec2host and is_dsa_key: log.error( "Apparently, amazon does not support DSA keys. " "Please specify a valid RSA key.") raise KeypairError( "Apparently, amazon does not support DSA keys." "Please specify a valid RSA key.") connection.import_key_pair(name, key_material) except Exception as ex: log.error( "Could not import key `%s` with name `%s` to `%s`", name, public_key_path, self._url) raise KeypairError( "could not create keypair `%s`: %s" % (name, ex)) else: # check fingerprint cloud_keypair = keypairs[name] if pkey: if "amazon" in self._ec2host: # AWS takes the MD5 hash of the key's DER representation. key = RSA.importKey(open(private_key_path).read()) der = key.publickey().exportKey('DER') m = hashlib.md5() m.update(der) digest = m.hexdigest() fingerprint = ':'.join(digest[i:(i + 2)] for i in range(0, len(digest), 2)) else: fingerprint = ':'.join(i.encode('hex') for i in pkey.get_fingerprint()) if fingerprint != cloud_keypair.fingerprint: if "amazon" in self._ec2host: log.error( "Apparently, Amazon does not compute the RSA key " "fingerprint as we do! 
We cannot check if the " "uploaded keypair is correct!") else: raise KeypairError( "Keypair `%s` is present but has " "different fingerprint. Aborting!" % name) def _check_security_group(self, name): """Checks if the security group exists. :param str name: name of the security group :return: str - security group id of the security group :raises: `SecurityGroupError` if group does not exist """ connection = self._connect() filters = {} if self._vpc: filters = {'vpc-id': self._vpc_id} security_groups = connection.get_all_security_groups(filters=filters) matching_groups = [ group for group in security_groups if name in [group.name, group.id] ] if len(matching_groups) == 0: raise SecurityGroupError( "the specified security group %s does not exist" % name) elif len(matching_groups) == 1: return matching_groups[0].id elif self._vpc and len(matching_groups) > 1: raise SecurityGroupError( "the specified security group name %s matches " "more than one security group" % name) def _check_subnet(self, name): """Checks if the subnet exists. :param str name: name of the subnet :return: str - subnet id of the subnet :raises: `SubnetError` if group does not exist """ # Subnets only exist in VPCs, so we don't need to worry about # the EC2 Classic case here. subnets = self._vpc_connection.get_all_subnets( filters={'vpcId': self._vpc_id}) matching_subnets = [ subnet for subnet in subnets if name in [subnet.tags.get('Name'), subnet.id] ] if len(matching_subnets) == 0: raise SubnetError( "the specified subnet %s does not exist" % name) elif len(matching_subnets) == 1: return matching_subnets[0].id else: raise SubnetError( "the specified subnet name %s matches more than " "one subnet" % name) def _find_image_id(self, image_id): """Finds an image id to a given id or name. 
:param str image_id: name or id of image :return: str - identifier of image """ if not self._images: connection = self._connect() self._images = connection.get_all_images() image_id_cloud = None for i in self._images: if i.id == image_id or i.name == image_id: image_id_cloud = i.id break if image_id_cloud: return image_id_cloud else: raise ImageError( "Could not find given image id `%s`" % image_id) def __getstate__(self): d = self.__dict__.copy() del d['_ec2_connection'] del d['_vpc_connection'] return d def __setstate__(self, state): self.__dict__ = state self._ec2_connection = None self._vpc_connection = None
gpl-3.0
4,051,944,693,585,966,600
39.425432
115
0.568405
false
4.45057
false
false
false
blekhmanlab/hominid
hominid/sort_results.py
1
6152
""" Read a rvcf file with stability selection scores for taxa. Sort the dataframe by rsq_median. Print results. usage: python sort_results.py \ ../example/stability_selection_example_output.vcf \ ../example/hominid_example_taxon_table_input.txt \ arcsinsqrt \ 0.5 \ 10 """ import argparse import sys import pandas as pd from hominid.hominid import read_taxon_file, align_snp_and_taxa def sort_results(rvcf_input_file_path, taxon_table_file_path, transform, r_sqr_median_cutoff, stability_cutoff, snp_count, no_tables, extra_columns): print('plotting {} SNPs from {}'.format(snp_count, rvcf_input_file_path)) # read the rvcf file and sort by rsq_median df = pd.read_csv(rvcf_input_file_path, sep='\t', dtype={'CHROM': str}) #print('df.shape: {}'.format(df.shape)) sorted_rsq_best_medians_df = df.sort_values(by='rsq_median', ascending=False) x_df = sorted_rsq_best_medians_df[sorted_rsq_best_medians_df.rsq_median > r_sqr_median_cutoff] print('{} SNPs with r_sqr > {:5.3f}'.format(x_df.shape[0], r_sqr_median_cutoff)) taxon_table_df = read_taxon_file(taxon_table_file_path, transform=transform) for row_i in range(sorted_rsq_best_medians_df.shape[0]): if row_i >= snp_count: break else: # get a 1-row dataframe snp_df = sorted_rsq_best_medians_df.iloc[[row_i]] aligned_snp_df, aligned_taxa_df = align_snp_and_taxa( snp_df, taxon_table_df ) # get the taxon stability selection scores # use the taxon table df index to get column names for snp_df taxon_scores_df = snp_df.loc[:, taxon_table_df.index].transpose() sorted_taxon_scores_df = taxon_scores_df.sort_values(by=taxon_scores_df.columns[0], ascending=False) #sorted_taxon_scores_df = taxon_scores_df.sort(taxon_scores_df.columns[0], ascending=False) p_df_list = [] print('{} {} {:5.3f}'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID, snp_df.iloc[0].rsq_median)) summary_line = '{}\t{}\t'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID) for i, (selected_taxon, selected_taxon_row) in enumerate(sorted_taxon_scores_df.iterrows()): # use 
selected_taxon_row.index[0] to index the first and only column selected_taxon_score = selected_taxon_row.iloc[0] if selected_taxon_score < stability_cutoff: #print('done with selected taxa') break else: # trim 'Root;' from the front of the taxon name if selected_taxon.startswith('Root;'): taxon_name = selected_taxon[5:] else: taxon_name = selected_taxon print(' {:5.3f} {}'.format(selected_taxon_score, taxon_name)) summary_line += '{}, '.format(taxon_name) gts = [ snp_df.iloc[0].REF + snp_df.iloc[0].REF, # 0 snp_df.iloc[0].REF + snp_df.iloc[0].ALT, # 1 snp_df.iloc[0].ALT + snp_df.iloc[0].ALT # 2 ] aligned_snp_value_list = aligned_snp_df.values.flatten().tolist() data_dict = { 'chromosome': [snp_df.iloc[0].CHROM] * aligned_snp_df.shape[1], 'snp_id': [snp_df.iloc[0].ID] * aligned_snp_df.shape[1], 'gene': [snp_df.iloc[0].GENE] * aligned_snp_df.shape[1], 'taxon': [selected_taxon] * aligned_snp_df.shape[1], 'abundance': aligned_taxa_df[selected_taxon].values.tolist(), 'variant_allele_count': [str(int(v)) for v in aligned_snp_value_list], 'genotype': [gts[int(v)] for v in aligned_snp_value_list], 'sample_id' : aligned_snp_df.columns } columns_to_display = ['abundance', 'variant_allele_count', 'genotype', 'sample_id'] if extra_columns: for extra_column in extra_columns.split(','): data_dict[extra_column] = snp_df.iloc[0][extra_column] columns_to_display.append(extra_column) p_df = pd.DataFrame(data_dict) p_df_list.append(p_df) if no_tables: pass else: p_df[columns_to_display].to_csv( sys.stdout, sep='\t' ) # save a stacked bar plot if len(p_df_list) > 0: file_name = 'stacked_bar_plot_selected_taxa_{}_{}.pdf'.format( snp_df.iloc[0].GENE, snp_df.iloc[0].ID ) p_df = pd.concat(p_df_list, axis=0) # at this point the index for p_df looks like # 0...76.0...76.0...76 # replace the index p_df.index = range(p_df.shape[0]) #p_df.to_csv(file_path, sep='\t') stacked_bar_title = '{}\n{}'.format(snp_df.iloc[0].GENE, snp_df.iloc[0].ID) def main(): argparser = argparse.ArgumentParser() 
argparser.add_argument('rvcf_input_file_path') argparser.add_argument('taxon_table_file_path') argparser.add_argument('transform') argparser.add_argument( 'r_sqr_median_cutoff', type=float ) argparser.add_argument( 'stability_cutoff', type=float ) argparser.add_argument( 'snp_count', type=int ) argparser.add_argument( '--no-tables', action='store_true' ) argparser.add_argument( '--extra-columns', type=str ) args = argparser.parse_args() print(args) sort_results(**vars(args)) if __name__ == '__main__': main()
mit
-8,981,641,414,200,290,000
41.136986
112
0.519831
false
3.429208
false
false
false
tejasnikumbh/Algorithms
ArraysAndSorting/MarkAndToys.py
1
1514
''' In place quickSort The quickSort Method Time Complexity : Best,Avg - O(NlogN) , Worst - O(N^2) Space Complexity : O(N) Auxilary Space : O(logN) for the stack frames ''' def quickSort(a,start,end): if(start >= end): return a else: pivot = a[end] swapIndex = start for i in range(start,end + 1): if(a[i] < pivot): #swap(a,i,swapIndex) temp = a[i] a[i] = a[swapIndex] a[swapIndex] = temp swapIndex += 1 #swap(a,end,swapIndex) temp = a[end] a[end] = a[swapIndex] a[swapIndex] = temp quickSort(a,start,swapIndex - 1) quickSort(a,swapIndex + 1,end) return a ''' Function that returns maximum toys that can be bought. Simple strategy is to sort the prices array and add as many toys as possible by incrementally adding up prices from the least to the most until budget is exhausted. ''' def max_toys(prices, rupees): #Compute and return final answer over here answer = 0 prices = quickSort(prices,0,len(prices)-1) totalBudget = rupees for price in prices: if((totalBudget - price) >= 0): totalBudget -= price answer += 1 else: break return answer ''' Main function for the program ''' if __name__ == '__main__': n, k = map(int, raw_input().split()) prices = map(int, raw_input().split()) print max_toys(prices, k)
bsd-2-clause
122,863,958,827,943,060
28.686275
82
0.562087
false
3.425339
false
false
false
openmips/stbgui
lib/python/Components/ServiceScan.py
1
9086
from enigma import eComponentScan, iDVBFrontend, eTimer from Components.NimManager import nimmanager as nimmgr from Tools.Transponder import getChannelNumber class ServiceScan: Idle = 1 Running = 2 Done = 3 Error = 4 DonePartially = 5 Errors = { 0: _("error starting scanning"), 1: _("error while scanning"), 2: _("no resource manager"), 3: _("no channel list") } def scanStatusChanged(self): if self.state == self.Running: self.progressbar.setValue(self.scan.getProgress()) self.lcd_summary and self.lcd_summary.updateProgress(self.scan.getProgress()) if self.scan.isDone(): errcode = self.scan.getError() if errcode == 0: self.state = self.DonePartially self.servicelist.listAll() else: self.state = self.Error self.errorcode = errcode self.network.setText("") self.transponder.setText("") else: result = self.foundServices + self.scan.getNumServices() percentage = self.scan.getProgress() if percentage > 99: percentage = 99 #TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol) message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage message += ", " #TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far message += ngettext("%d channel found", "%d channels found", result) % result self.text.setText(message) transponder = self.scan.getCurrentTransponder() network = "" tp_text = "" if transponder: tp_type = transponder.getSystem() if tp_type == iDVBFrontend.feSatellite: network = _("Satellite") tp = transponder.getDVBS() orb_pos = tp.orbital_position try: sat_name = str(nimmgr.getSatDescription(orb_pos)) except KeyError: sat_name = "" if orb_pos > 1800: # west orb_pos = 3600 - orb_pos h = _("W") else: h = _("E") if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name: network = sat_name else: network = ("%s %d.%d %s") % (sat_name, orb_pos / 10, orb_pos % 10, h) tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" 
}.get(tp.system, "") if tp_text == "DVB-S2": tp_text = ("%s %s") % ( tp_text, { tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK", tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16", tp.Modulation_16APSK : "16APSK", tp.Modulation_32APSK : "32APSK" }.get(tp.modulation, "")) tp_text = ("%s %d%c / %d / %s") % ( tp_text, tp.frequency/1000, { tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L', tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '), tp.symbol_rate/1000, { tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3", tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8", tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, "")) if tp.is_id > -1 and tp.system == tp.System_DVB_S2: tp_text = ("%s IS %d") % (tp_text, tp.is_id) elif tp_type == iDVBFrontend.feCable: network = _("Cable") tp = transponder.getDVBC() tp_text = ("DVB-C/C2 %s %d MHz / SR:%d / FEC:%s") %( { tp.Modulation_Auto : "AUTO", tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32", tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128", tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""), tp.frequency/1000, tp.symbol_rate/1000, { tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3", tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8", tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_6_7 : "6/7", tp.FEC_None : "NONE" }.get(tp.fec_inner, "")) elif tp_type == iDVBFrontend.feTerrestrial: network = _("Terrestrial") tp = transponder.getDVBT() channel = getChannelNumber(tp.frequency, self.scanList[self.run]["feid"]) if channel: channel = _("CH") + "%s " % channel freqMHz = "%0.1f MHz" % (tp.frequency/1000000.) 
tp_text = ("%s %s %s %s") %( { tp.System_DVB_T_T2 : "DVB-T/T2", tp.System_DVB_T : "DVB-T", tp.System_DVB_T2 : "DVB-T2" }.get(tp.system, ""), { tp.Modulation_QPSK : "QPSK", tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64", tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""), "%s%s" % (channel, freqMHz.replace(".0","")), { tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz", tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz", tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz" }.get(tp.bandwidth, "")) elif tp_type == iDVBFrontend.feATSC: network = _("ATSC") tp = transponder.getATSC() freqMHz = "%0.1f MHz" % (tp.frequency/1000000.) tp_text = ("%s %s %s %s") % ( { tp.System_ATSC : _("ATSC"), tp.System_DVB_C_ANNEX_B : _("DVB-C ANNEX B") }.get(tp.system, ""), { tp.Modulation_Auto : _("Auto"), tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32", tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128", tp.Modulation_QAM256 : "QAM256", tp.Modulation_VSB_8 : "8VSB", tp.Modulation_VSB_16 : "16VSB" }.get(tp.modulation, ""), freqMHz.replace(".0",""), { tp.Inversion_Off : _("Off"), tp.Inversion_On :_("On"), tp.Inversion_Unknown : _("Auto") }.get(tp.inversion, "")) else: print "unknown transponder type in scanStatusChanged" self.network.setText(network) self.transponder.setText(tp_text) if self.state == self.DonePartially: self.foundServices += self.scan.getNumServices() self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", self.foundServices) % self.foundServices) if self.state == self.Error: self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) ) if self.state == self.DonePartially or self.state == self.Error: self.delaytimer.start(100, True) def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary): 
self.foundServices = 0 self.progressbar = progressbar self.text = text self.servicelist = servicelist self.passNumber = passNumber self.scanList = scanList self.frontendInfo = frontendInfo self.transponder = transponder self.network = network self.run = 0 self.lcd_summary = lcd_summary self.scan = None self.delaytimer = eTimer() self.delaytimer.callback.append(self.execEnd) def doRun(self): self.scan = eComponentScan() self.frontendInfo.frontend_source = lambda : self.scan.getFrontend() self.feid = self.scanList[self.run]["feid"] self.flags = self.scanList[self.run]["flags"] self.networkid = 0 if "networkid" in self.scanList[self.run]: self.networkid = self.scanList[self.run]["networkid"] self.state = self.Idle self.scanStatusChanged() for x in self.scanList[self.run]["transponders"]: self.scan.addInitial(x) def updatePass(self): size = len(self.scanList) if size > 1: txt = "%s %s/%s (%s)" % (_("pass"), self.run + 1, size, nimmgr.getNim(self.scanList[self.run]["feid"]).slot_name) self.passNumber.setText(txt) def execBegin(self): self.doRun() self.updatePass() self.scan.statusChanged.get().append(self.scanStatusChanged) self.scan.newService.get().append(self.newService) self.servicelist.clear() self.state = self.Running err = self.scan.start(self.feid, self.flags, self.networkid) self.frontendInfo.updateFrontendData() if err: self.state = self.Error self.errorcode = 0 self.scanStatusChanged() def execEnd(self): if self.scan is None: if not self.isDone(): print "*** warning *** scan was not finished!" 
return self.scan.statusChanged.get().remove(self.scanStatusChanged) self.scan.newService.get().remove(self.newService) self.scan = None if self.run != len(self.scanList) - 1: self.run += 1 self.execBegin() else: self.state = self.Done def isDone(self): return self.state == self.Done or self.state == self.Error def newService(self): newServiceName = self.scan.getLastServiceName() newServiceRef = self.scan.getLastServiceRef() self.servicelist.addItem((newServiceName, newServiceRef)) self.lcd_summary and self.lcd_summary.updateService(newServiceName) def destroy(self): self.state = self.Idle if self.scan is not None: self.scan.statusChanged.get().remove(self.scanStatusChanged) self.scan.newService.get().remove(self.newService) self.scan = None
gpl-2.0
5,148,115,482,864,712,000
37.016736
152
0.619194
false
2.752499
false
false
false
donbixler/xhtml2pdf
xhtml2pdf/parser.py
1
24988
# -*- coding: utf-8 -*- # Copyright 2010 Dirk Holtwick, holtwick.it # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from html5lib import treebuilders, inputstream from xhtml2pdf.default import TAGS, STRING, INT, BOOL, SIZE, COLOR, FILE from xhtml2pdf.default import BOX, POS, MUST, FONT from xhtml2pdf.util import getSize, getBool, toList, getColor, getAlign from xhtml2pdf.util import getBox, getPos, pisaTempFile from reportlab.platypus.doctemplate import NextPageTemplate, FrameBreak from reportlab.platypus.flowables import PageBreak, KeepInFrame from xhtml2pdf.xhtml2pdf_reportlab import PmlRightPageBreak, PmlLeftPageBreak from xhtml2pdf.tags import * # TODO: Kill wild import! from xhtml2pdf.tables import * # TODO: Kill wild import! from xhtml2pdf.util import * # TODO: Kill wild import! from xml.dom import Node import copy import html5lib import logging import re import types import xhtml2pdf.w3c.cssDOMElementInterface as cssDOMElementInterface import xml.dom.minidom CSSAttrCache = {} log = logging.getLogger("xhtml2pdf") rxhttpstrip = re.compile("https?://[^/]+(.*)", re.M | re.I) class AttrContainer(dict): def __getattr__(self, name): try: return dict.__getattr__(self, name) except: return self[name] def pisaGetAttributes(c, tag, attributes): global TAGS attrs = {} if attributes: for k, v in attributes.items(): try: attrs[str(k)] = str(v) # XXX no Unicode! 
Reportlab fails with template names except: attrs[k] = v nattrs = {} if tag in TAGS: block, adef = TAGS[tag] adef["id"] = STRING # print block, adef for k, v in adef.iteritems(): nattrs[k] = None # print k, v # defaults, wenn vorhanden if type(v) == types.TupleType: if v[1] == MUST: if k not in attrs: log.warn(c.warning("Attribute '%s' must be set!", k)) nattrs[k] = None continue nv = attrs.get(k, v[1]) dfl = v[1] v = v[0] else: nv = attrs.get(k, None) dfl = None if nv is not None: if type(v) == types.ListType: nv = nv.strip().lower() if nv not in v: #~ raise PML_EXCEPTION, "attribute '%s' of wrong value, allowed is one of: %s" % (k, repr(v)) log.warn(c.warning("Attribute '%s' of wrong value, allowed is one of: %s", k, repr(v))) nv = dfl elif v == BOOL: nv = nv.strip().lower() nv = nv in ("1", "y", "yes", "true", str(k)) elif v == SIZE: try: nv = getSize(nv) except: log.warn(c.warning("Attribute '%s' expects a size value", k)) elif v == BOX: nv = getBox(nv, c.pageSize) elif v == POS: nv = getPos(nv, c.pageSize) elif v == INT: nv = int(nv) elif v == COLOR: nv = getColor(nv) elif v == FILE: nv = c.getFile(nv) elif v == FONT: nv = c.getFontName(nv) nattrs[k] = nv return AttrContainer(nattrs) attrNames = ''' color font-family font-size font-weight font-style text-decoration line-height letter-spacing background-color display margin-left margin-right margin-top margin-bottom padding-left padding-right padding-top padding-bottom border-top-color border-top-style border-top-width border-bottom-color border-bottom-style border-bottom-width border-left-color border-left-style border-left-width border-right-color border-right-style border-right-width text-align vertical-align width height zoom page-break-after page-break-before list-style-type list-style-image white-space text-indent -pdf-page-break -pdf-frame-break -pdf-next-page -pdf-keep-with-next -pdf-outline -pdf-outline-level -pdf-outline-open -pdf-line-spacing -pdf-keep-in-frame-mode -pdf-word-wrap '''.strip().split() 
def getCSSAttr(self, cssCascade, attrName, default=NotImplemented): if attrName in self.cssAttrs: return self.cssAttrs[attrName] try: result = cssCascade.findStyleFor(self.cssElement, attrName, default) except LookupError: result = None # XXX Workaround for inline styles try: style = self.cssStyle except: style = self.cssStyle = cssCascade.parser.parseInline(self.cssElement.getStyleAttr() or '')[0] if attrName in style: result = style[attrName] if result == 'inherit': if hasattr(self.parentNode, 'getCSSAttr'): result = self.parentNode.getCSSAttr(cssCascade, attrName, default) elif default is not NotImplemented: return default raise LookupError("Could not find inherited CSS attribute value for '%s'" % (attrName,)) if result is not None: self.cssAttrs[attrName] = result return result #TODO: Monkeypatching standard lib should go away. xml.dom.minidom.Element.getCSSAttr = getCSSAttr # Create an aliasing system. Many sources use non-standard tags, because browsers allow # them to. This allows us to map a nonstandard name to the standard one. 
nonStandardAttrNames = { 'bgcolor': 'background-color', } def mapNonStandardAttrs(c, n, attrList): for attr in nonStandardAttrNames: if attr in attrList and nonStandardAttrNames[attr] not in c: c[nonStandardAttrNames[attr]] = attrList[attr] return c def getCSSAttrCacheKey(node): _cl = _id = _st = '' for k, v in node.attributes.items(): if k == 'class': _cl = v elif k == 'id': _id = v elif k == 'style': _st = v return "%s#%s#%s#%s#%s" % (id(node.parentNode), node.tagName.lower(), _cl, _id, _st) def CSSCollect(node, c): #node.cssAttrs = {} #return node.cssAttrs if c.css: _key = getCSSAttrCacheKey(node) if hasattr(node.parentNode, "tagName"): if node.parentNode.tagName.lower() != "html": CachedCSSAttr = CSSAttrCache.get(_key, None) if CachedCSSAttr is not None: node.cssAttrs = CachedCSSAttr return CachedCSSAttr node.cssElement = cssDOMElementInterface.CSSDOMElementInterface(node) node.cssAttrs = {} # node.cssElement.onCSSParserVisit(c.cssCascade.parser) cssAttrMap = {} for cssAttrName in attrNames: try: cssAttrMap[cssAttrName] = node.getCSSAttr(c.cssCascade, cssAttrName) #except LookupError: # pass except Exception: # TODO: Kill this catch-all! 
log.debug("CSS error '%s'", cssAttrName, exc_info=1) CSSAttrCache[_key] = node.cssAttrs return node.cssAttrs def CSS2Frag(c, kw, isBlock): # COLORS if "color" in c.cssAttr: c.frag.textColor = getColor(c.cssAttr["color"]) if "background-color" in c.cssAttr: c.frag.backColor = getColor(c.cssAttr["background-color"]) # FONT SIZE, STYLE, WEIGHT if "font-family" in c.cssAttr: c.frag.fontName = c.getFontName(c.cssAttr["font-family"]) if "font-size" in c.cssAttr: # XXX inherit c.frag.fontSize = max(getSize("".join(c.cssAttr["font-size"]), c.frag.fontSize, c.baseFontSize), 1.0) if "line-height" in c.cssAttr: leading = "".join(c.cssAttr["line-height"]) c.frag.leading = getSize(leading, c.frag.fontSize) c.frag.leadingSource = leading else: c.frag.leading = getSize(c.frag.leadingSource, c.frag.fontSize) if "letter-spacing" in c.cssAttr: c.frag.letterSpacing = c.cssAttr["letter-spacing"] if "-pdf-line-spacing" in c.cssAttr: c.frag.leadingSpace = getSize("".join(c.cssAttr["-pdf-line-spacing"])) # print "line-spacing", c.cssAttr["-pdf-line-spacing"], c.frag.leading if "font-weight" in c.cssAttr: value = c.cssAttr["font-weight"].lower() if value in ("bold", "bolder", "500", "600", "700", "800", "900"): c.frag.bold = 1 else: c.frag.bold = 0 for value in toList(c.cssAttr.get("text-decoration", "")): if "underline" in value: c.frag.underline = 1 if "line-through" in value: c.frag.strike = 1 if "none" in value: c.frag.underline = 0 c.frag.strike = 0 if "font-style" in c.cssAttr: value = c.cssAttr["font-style"].lower() if value in ("italic", "oblique"): c.frag.italic = 1 else: c.frag.italic = 0 if "white-space" in c.cssAttr: # normal | pre | nowrap c.frag.whiteSpace = str(c.cssAttr["white-space"]).lower() # ALIGN & VALIGN if "text-align" in c.cssAttr: c.frag.alignment = getAlign(c.cssAttr["text-align"]) if "vertical-align" in c.cssAttr: c.frag.vAlign = c.cssAttr["vertical-align"] # HEIGHT & WIDTH if "height" in c.cssAttr: c.frag.height = "".join(toList(c.cssAttr["height"])) # XXX 
Relative is not correct! if c.frag.height in ("auto",): c.frag.height = None if "width" in c.cssAttr: c.frag.width = "".join(toList(c.cssAttr["width"])) # XXX Relative is not correct! if c.frag.width in ("auto",): c.frag.width = None # ZOOM if "zoom" in c.cssAttr: zoom = "".join(toList(c.cssAttr["zoom"])) # XXX Relative is not correct! if zoom.endswith("%"): zoom = float(zoom[: - 1]) / 100.0 c.frag.zoom = float(zoom) # MARGINS & LIST INDENT, STYLE if isBlock: if "margin-top" in c.cssAttr: c.frag.spaceBefore = getSize(c.cssAttr["margin-top"], c.frag.fontSize) if "margin-bottom" in c.cssAttr: c.frag.spaceAfter = getSize(c.cssAttr["margin-bottom"], c.frag.fontSize) if "margin-left" in c.cssAttr: c.frag.bulletIndent = kw["margin-left"] # For lists kw["margin-left"] += getSize(c.cssAttr["margin-left"], c.frag.fontSize) c.frag.leftIndent = kw["margin-left"] if "margin-right" in c.cssAttr: kw["margin-right"] += getSize(c.cssAttr["margin-right"], c.frag.fontSize) c.frag.rightIndent = kw["margin-right"] if "text-indent" in c.cssAttr: c.frag.firstLineIndent = getSize(c.cssAttr["text-indent"], c.frag.fontSize) if "list-style-type" in c.cssAttr: c.frag.listStyleType = str(c.cssAttr["list-style-type"]).lower() if "list-style-image" in c.cssAttr: c.frag.listStyleImage = c.getFile(c.cssAttr["list-style-image"]) # PADDINGS if isBlock: if "padding-top" in c.cssAttr: c.frag.paddingTop = getSize(c.cssAttr["padding-top"], c.frag.fontSize) if "padding-bottom" in c.cssAttr: c.frag.paddingBottom = getSize(c.cssAttr["padding-bottom"], c.frag.fontSize) if "padding-left" in c.cssAttr: c.frag.paddingLeft = getSize(c.cssAttr["padding-left"], c.frag.fontSize) if "padding-right" in c.cssAttr: c.frag.paddingRight = getSize(c.cssAttr["padding-right"], c.frag.fontSize) # BORDERS if isBlock: if "border-top-width" in c.cssAttr: c.frag.borderTopWidth = getSize(c.cssAttr["border-top-width"], c.frag.fontSize) if "border-bottom-width" in c.cssAttr: c.frag.borderBottomWidth = 
getSize(c.cssAttr["border-bottom-width"], c.frag.fontSize) if "border-left-width" in c.cssAttr: c.frag.borderLeftWidth = getSize(c.cssAttr["border-left-width"], c.frag.fontSize) if "border-right-width" in c.cssAttr: c.frag.borderRightWidth = getSize(c.cssAttr["border-right-width"], c.frag.fontSize) if "border-top-style" in c.cssAttr: c.frag.borderTopStyle = c.cssAttr["border-top-style"] if "border-bottom-style" in c.cssAttr: c.frag.borderBottomStyle = c.cssAttr["border-bottom-style"] if "border-left-style" in c.cssAttr: c.frag.borderLeftStyle = c.cssAttr["border-left-style"] if "border-right-style" in c.cssAttr: c.frag.borderRightStyle = c.cssAttr["border-right-style"] if "border-top-color" in c.cssAttr: c.frag.borderTopColor = getColor(c.cssAttr["border-top-color"]) if "border-bottom-color" in c.cssAttr: c.frag.borderBottomColor = getColor(c.cssAttr["border-bottom-color"]) if "border-left-color" in c.cssAttr: c.frag.borderLeftColor = getColor(c.cssAttr["border-left-color"]) if "border-right-color" in c.cssAttr: c.frag.borderRightColor = getColor(c.cssAttr["border-right-color"]) def pisaPreLoop(node, context, collect=False): """ Collect all CSS definitions """ data = u"" if node.nodeType == Node.TEXT_NODE and collect: data = node.data elif node.nodeType == Node.ELEMENT_NODE: name = node.tagName.lower() if name in ("style", "link"): attr = pisaGetAttributes(context, name, node.attributes) media = [x.strip() for x in attr.media.lower().split(",") if x.strip()] if attr.get("type", "").lower() in ("", "text/css") and \ (not media or "all" in media or "print" in media or "pdf" in media): if name == "style": for node in node.childNodes: data += pisaPreLoop(node, context, collect=True) context.addCSS(data) return u"" if name == "link" and attr.href and attr.rel.lower() == "stylesheet": # print "CSS LINK", attr context.addCSS('\n@import "%s" %s;' % (attr.href, ",".join(media))) for node in node.childNodes: result = pisaPreLoop(node, context, collect=collect) if collect: 
data += result return data def pisaLoop(node, context, path=None, **kw): if path is None: path = [] # Initialize KW if not kw: kw = { "margin-top": 0, "margin-bottom": 0, "margin-left": 0, "margin-right": 0, } else: kw = copy.copy(kw) #indent = len(path) * " " # only used for debug print statements # TEXT if node.nodeType == Node.TEXT_NODE: # print indent, "#", repr(node.data) #, context.frag context.addFrag(node.data) # context.text.append(node.value) # ELEMENT elif node.nodeType == Node.ELEMENT_NODE: node.tagName = node.tagName.replace(":", "").lower() if node.tagName in ("style", "script"): return path = copy.copy(path) + [node.tagName] # Prepare attributes attr = pisaGetAttributes(context, node.tagName, node.attributes) #log.debug(indent + "<%s %s>" % (node.tagName, attr) + repr(node.attributes.items())) #, path # Calculate styles context.cssAttr = CSSCollect(node, context) context.cssAttr = mapNonStandardAttrs(context.cssAttr, node, attr) context.node = node # Block? PAGE_BREAK = 1 PAGE_BREAK_RIGHT = 2 PAGE_BREAK_LEFT = 3 pageBreakAfter = False frameBreakAfter = False display = context.cssAttr.get("display", "inline").lower() # print indent, node.tagName, display, context.cssAttr.get("background-color", None), attr isBlock = (display == "block") if isBlock: context.addPara() # Page break by CSS if "-pdf-next-page" in context.cssAttr: context.addStory(NextPageTemplate(str(context.cssAttr["-pdf-next-page"]))) if "-pdf-page-break" in context.cssAttr: if str(context.cssAttr["-pdf-page-break"]).lower() == "before": context.addStory(PageBreak()) if "-pdf-frame-break" in context.cssAttr: if str(context.cssAttr["-pdf-frame-break"]).lower() == "before": context.addStory(FrameBreak()) if str(context.cssAttr["-pdf-frame-break"]).lower() == "after": frameBreakAfter = True if "page-break-before" in context.cssAttr: if str(context.cssAttr["page-break-before"]).lower() == "always": context.addStory(PageBreak()) if str(context.cssAttr["page-break-before"]).lower() == "right": 
context.addStory(PageBreak()) context.addStory(PmlRightPageBreak()) if str(context.cssAttr["page-break-before"]).lower() == "left": context.addStory(PageBreak()) context.addStory(PmlLeftPageBreak()) if "page-break-after" in context.cssAttr: if str(context.cssAttr["page-break-after"]).lower() == "always": pageBreakAfter = PAGE_BREAK if str(context.cssAttr["page-break-after"]).lower() == "right": pageBreakAfter = PAGE_BREAK_RIGHT if str(context.cssAttr["page-break-after"]).lower() == "left": pageBreakAfter = PAGE_BREAK_LEFT if display == "none": # print "none!" return # Translate CSS to frags # Save previous frag styles context.pushFrag() # Map styles to Reportlab fragment properties CSS2Frag(context, kw, isBlock) # EXTRAS if "-pdf-keep-with-next" in context.cssAttr: context.frag.keepWithNext = getBool(context.cssAttr["-pdf-keep-with-next"]) if "-pdf-outline" in context.cssAttr: context.frag.outline = getBool(context.cssAttr["-pdf-outline"]) if "-pdf-outline-level" in context.cssAttr: context.frag.outlineLevel = int(context.cssAttr["-pdf-outline-level"]) if "-pdf-outline-open" in context.cssAttr: context.frag.outlineOpen = getBool(context.cssAttr["-pdf-outline-open"]) if "-pdf-word-wrap" in context.cssAttr: context.frag.wordWrap = context.cssAttr["-pdf-word-wrap"] # handle keep-in-frame keepInFrameMode = None keepInFrameMaxWidth = 0 keepInFrameMaxHeight = 0 if "-pdf-keep-in-frame-mode" in context.cssAttr: value = str(context.cssAttr["-pdf-keep-in-frame-mode"]).strip().lower() if value in ("shrink", "error", "overflow", "truncate"): keepInFrameMode = value if "-pdf-keep-in-frame-max-width" in context.cssAttr: keepInFrameMaxWidth = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-width"])) if "-pdf-keep-in-frame-max-height" in context.cssAttr: keepInFrameMaxHeight = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-height"])) # ignore nested keep-in-frames, tables have their own KIF handling keepInFrame = keepInFrameMode is not None and 
context.keepInFrameIndex is None if keepInFrame: # keep track of current story index, so we can wrap everythink # added after this point in a KeepInFrame context.keepInFrameIndex = len(context.story) # BEGIN tag klass = globals().get("pisaTag%s" % node.tagName.replace(":", "").upper(), None) obj = None # Static block elementId = attr.get("id", None) staticFrame = context.frameStatic.get(elementId, None) if staticFrame: context.frag.insideStaticFrame += 1 oldStory = context.swapStory() # Tag specific operations if klass is not None: obj = klass(node, attr) obj.start(context) # Visit child nodes context.fragBlock = fragBlock = copy.copy(context.frag) for nnode in node.childNodes: pisaLoop(nnode, context, path, **kw) context.fragBlock = fragBlock # END tag if obj: obj.end(context) # Block? if isBlock: context.addPara() # XXX Buggy! # Page break by CSS if pageBreakAfter: context.addStory(PageBreak()) if pageBreakAfter == PAGE_BREAK_RIGHT: context.addStory(PmlRightPageBreak()) if pageBreakAfter == PAGE_BREAK_LEFT: context.addStory(PmlLeftPageBreak()) if frameBreakAfter: context.addStory(FrameBreak()) if keepInFrame: # get all content added after start of -pdf-keep-in-frame and wrap # it in a KeepInFrame substory = context.story[context.keepInFrameIndex:] context.story = context.story[:context.keepInFrameIndex] context.story.append( KeepInFrame( content=substory, maxWidth=keepInFrameMaxWidth, maxHeight=keepInFrameMaxHeight)) context.keepInFrameIndex = None # Static block, END if staticFrame: context.addPara() for frame in staticFrame: frame.pisaStaticStory = context.story context.swapStory(oldStory) context.frag.insideStaticFrame -= 1 # context.debug(1, indent, "</%s>" % (node.tagName)) # Reset frag style context.pullFrag() # Unknown or not handled else: # context.debug(1, indent, "???", node, node.nodeType, repr(node)) # Loop over children for node in node.childNodes: pisaLoop(node, context, path, **kw) def pisaParser(src, context, default_css="", xhtml=False, 
encoding=None, xml_output=None): """ - Parse HTML and get miniDOM - Extract CSS informations, add default CSS, parse CSS - Handle the document DOM itself and build reportlab story - Return Context object """ global CSSAttrCache CSSAttrCache = {} if xhtml: #TODO: XHTMLParser doesn't see to exist... parser = html5lib.XHTMLParser(tree=treebuilders.getTreeBuilder("dom")) else: parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("dom")) if type(src) in types.StringTypes: if type(src) is types.UnicodeType: # If an encoding was provided, do not change it. if not encoding: encoding = "utf-8" src = src.encode(encoding) src = pisaTempFile(src, capacity=context.capacity) # Test for the restrictions of html5lib if encoding: # Workaround for html5lib<0.11.1 if hasattr(inputstream, "isValidEncoding"): if encoding.strip().lower() == "utf8": encoding = "utf-8" if not inputstream.isValidEncoding(encoding): log.error("%r is not a valid encoding e.g. 'utf8' is not valid but 'utf-8' is!", encoding) else: if inputstream.codecName(encoding) is None: log.error("%r is not a valid encoding", encoding) document = parser.parse( src, encoding=encoding) if xml_output: if encoding: xml_output.write(document.toprettyxml(encoding=encoding)) else: xml_output.write(document.toprettyxml(encoding="utf8")) if default_css: context.addCSS(default_css) pisaPreLoop(document, context) #try: context.parseCSS() #except: # context.cssText = DEFAULT_CSS # context.parseCSS() # context.debug(9, pprint.pformat(context.css)) pisaLoop(document, context) return context # Shortcuts HTML2PDF = pisaParser def XHTML2PDF(*a, **kw): kw["xhtml"] = True return HTML2PDF(*a, **kw) XML2PDF = XHTML2PDF
apache-2.0
-1,639,543,538,045,728,800
34.494318
117
0.581279
false
3.732338
false
false
false
Dioptas/Dioptas
dioptas/model/util/BackgroundExtraction.py
1
2873
# -*- coding: utf-8 -*- # Dioptas - GUI program for fast processing of 2D X-ray diffraction data # Principal author: Clemens Prescher ([email protected]) # Copyright (C) 2014-2019 GSECARS, University of Chicago, USA # Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany # Copyright (C) 2019-2020 DESY, Hamburg, Germany # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import logging logger = logging.getLogger(__name__) import numpy as np try: from .smooth_bruckner import smooth_bruckner except ImportError: try: from .smooth_bruckner_cython import smooth_bruckner except ImportError: try: import pyximport pyximport.install(language_level=3) from .smooth_bruckner_cython import smooth_bruckner except ImportError as e: print(e) logger.warning( "Could not import the Fortran or Cython version of smooth_bruckner. Using python implementation instead. Please" " run 'f2py -c -m smooth_bruckner smooth_bruckner.f95' in the model/util folder for faster" " implementation") from .smooth_bruckner_python import smooth_bruckner def extract_background(x, y, smooth_width=0.1, iterations=50, cheb_order=50): """ Performs a background subtraction using bruckner smoothing and a chebyshev polynomial. Standard parameters are found to be optimal for synchrotron XRD. 
:param x: x-data of pattern :param y: y-data of pattern :param smooth_width: width of the window in x-units used for bruckner smoothing :param iterations: number of iterations for the bruckner smoothing :param cheb_order: order of the fitted chebyshev polynomial :return: vector of extracted y background """ smooth_points = int((float(smooth_width) / (x[1] - x[0]))) y_smooth = smooth_bruckner(y, smooth_points, iterations) # get cheb input parameters x_cheb = 2. * (x - x[0]) / (x[-1] - x[0]) - 1. cheb_parameters = np.polynomial.chebyshev.chebfit(x_cheb, y_smooth, cheb_order) return np.polynomial.chebyshev.chebval(x_cheb, cheb_parameters)
gpl-3.0
-2,083,473,547,483,108,400
42.530303
128
0.679777
false
3.856376
false
false
false
quru/wagtail
wagtail/wagtailimages/models.py
1
17958
from __future__ import absolute_import, unicode_literals import hashlib import os.path from collections import OrderedDict from contextlib import contextmanager import django from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files import File from django.core.urlresolvers import reverse from django.db import models from django.db.models.signals import pre_delete, pre_save from django.dispatch.dispatcher import receiver from django.forms.widgets import flatatt from django.utils.encoding import python_2_unicode_compatible from django.utils.functional import cached_property from django.utils.safestring import mark_safe from django.utils.six import BytesIO, string_types, text_type from django.utils.translation import ugettext_lazy as _ from taggit.managers import TaggableManager from unidecode import unidecode from willow.image import Image as WillowImage from wagtail.wagtailadmin.taggable import TagSearchable from wagtail.wagtailadmin.utils import get_object_usage from wagtail.wagtailcore import hooks from wagtail.wagtailcore.models import CollectionMember from wagtail.wagtailimages.exceptions import InvalidFilterSpecError from wagtail.wagtailimages.rect import Rect from wagtail.wagtailsearch import index from wagtail.wagtailsearch.queryset import SearchableQuerySetMixin class SourceImageIOError(IOError): """ Custom exception to distinguish IOErrors that were thrown while opening the source image """ pass class ImageQuerySet(SearchableQuerySetMixin, models.QuerySet): pass def get_upload_to(instance, filename): """ Obtain a valid upload path for an image file. This needs to be a module-level function so that it can be referenced within migrations, but simply delegates to the `get_upload_to` method of the instance, so that AbstractImage subclasses can override it. """ return instance.get_upload_to(filename) def get_rendition_upload_to(instance, filename): """ Obtain a valid upload path for an image rendition file. 
This needs to be a module-level function so that it can be referenced within migrations, but simply delegates to the `get_upload_to` method of the instance, so that AbstractRendition subclasses can override it. """ return instance.get_upload_to(filename) @python_2_unicode_compatible class AbstractImage(CollectionMember, TagSearchable): title = models.CharField(max_length=255, verbose_name=_('title')) file = models.ImageField( verbose_name=_('file'), upload_to=get_upload_to, width_field='width', height_field='height' ) width = models.IntegerField(verbose_name=_('width'), editable=False) height = models.IntegerField(verbose_name=_('height'), editable=False) created_at = models.DateTimeField(verbose_name=_('created at'), auto_now_add=True, db_index=True) uploaded_by_user = models.ForeignKey( settings.AUTH_USER_MODEL, verbose_name=_('uploaded by user'), null=True, blank=True, editable=False, on_delete=models.SET_NULL ) tags = TaggableManager(help_text=None, blank=True, verbose_name=_('tags')) focal_point_x = models.PositiveIntegerField(null=True, blank=True) focal_point_y = models.PositiveIntegerField(null=True, blank=True) focal_point_width = models.PositiveIntegerField(null=True, blank=True) focal_point_height = models.PositiveIntegerField(null=True, blank=True) file_size = models.PositiveIntegerField(null=True, editable=False) objects = ImageQuerySet.as_manager() def is_stored_locally(self): """ Returns True if the image is hosted on the local filesystem """ try: self.file.path return True except NotImplementedError: return False def get_file_size(self): if self.file_size is None: try: self.file_size = self.file.size except OSError: # File doesn't exist return self.save(update_fields=['file_size']) return self.file_size def get_upload_to(self, filename): folder_name = 'original_images' filename = self.file.field.storage.get_valid_name(filename) # do a unidecode in the filename and then # replace non-ascii characters in filename with _ , to sidestep issues with 
filesystem encoding filename = "".join((i if ord(i) < 128 else '_') for i in unidecode(filename)) # Truncate filename so it fits in the 100 character limit # https://code.djangoproject.com/ticket/9893 while len(os.path.join(folder_name, filename)) >= 95: prefix, dot, extension = filename.rpartition('.') filename = prefix[:-1] + dot + extension return os.path.join(folder_name, filename) def get_usage(self): return get_object_usage(self) @property def usage_url(self): return reverse('wagtailimages:image_usage', args=(self.id,)) search_fields = TagSearchable.search_fields + CollectionMember.search_fields + [ index.FilterField('uploaded_by_user'), ] def __str__(self): return self.title @contextmanager def get_willow_image(self): # Open file if it is closed close_file = False try: image_file = self.file if self.file.closed: # Reopen the file if self.is_stored_locally(): self.file.open('rb') else: # Some external storage backends don't allow reopening # the file. Get a fresh file instance. #1397 storage = self._meta.get_field('file').storage image_file = storage.open(self.file.name, 'rb') close_file = True except IOError as e: # re-throw this as a SourceImageIOError so that calling code can distinguish # these from IOErrors elsewhere in the process raise SourceImageIOError(text_type(e)) # Seek to beginning image_file.seek(0) try: yield WillowImage.open(image_file) finally: if close_file: image_file.close() def get_rect(self): return Rect(0, 0, self.width, self.height) def get_focal_point(self): if self.focal_point_x is not None and \ self.focal_point_y is not None and \ self.focal_point_width is not None and \ self.focal_point_height is not None: return Rect.from_point( self.focal_point_x, self.focal_point_y, self.focal_point_width, self.focal_point_height, ) def has_focal_point(self): return self.get_focal_point() is not None def set_focal_point(self, rect): if rect is not None: self.focal_point_x = rect.centroid_x self.focal_point_y = rect.centroid_y 
self.focal_point_width = rect.width self.focal_point_height = rect.height else: self.focal_point_x = None self.focal_point_y = None self.focal_point_width = None self.focal_point_height = None def get_suggested_focal_point(self): with self.get_willow_image() as willow: faces = willow.detect_faces() if faces: # Create a bounding box around all faces left = min(face[0] for face in faces) top = min(face[1] for face in faces) right = max(face[2] for face in faces) bottom = max(face[3] for face in faces) focal_point = Rect(left, top, right, bottom) else: features = willow.detect_features() if features: # Create a bounding box around all features left = min(feature[0] for feature in features) top = min(feature[1] for feature in features) right = max(feature[0] for feature in features) bottom = max(feature[1] for feature in features) focal_point = Rect(left, top, right, bottom) else: return None # Add 20% to width and height and give it a minimum size x, y = focal_point.centroid width, height = focal_point.size width *= 1.20 height *= 1.20 width = max(width, 100) height = max(height, 100) return Rect.from_point(x, y, width, height) @classmethod def get_rendition_model(cls): """ Get the Rendition model for this Image model """ if django.VERSION >= (1, 9): return cls.renditions.rel.related_model else: return cls.renditions.related.related_model def get_rendition(self, filter): if isinstance(filter, string_types): filter, created = Filter.objects.get_or_create(spec=filter) cache_key = filter.get_cache_key(self) Rendition = self.get_rendition_model() try: rendition = self.renditions.get( filter=filter, focal_point_key=cache_key, ) except Rendition.DoesNotExist: # Generate the rendition image generated_image = filter.run(self, BytesIO()) # Generate filename input_filename = os.path.basename(self.file.name) input_filename_without_extension, input_extension = os.path.splitext(input_filename) # A mapping of image formats to extensions FORMAT_EXTENSIONS = { 'jpeg': '.jpg', 'png': 
'.png', 'gif': '.gif', } output_extension = filter.spec.replace('|', '.') + FORMAT_EXTENSIONS[generated_image.format_name] if cache_key: output_extension = cache_key + '.' + output_extension # Truncate filename to prevent it going over 60 chars output_filename_without_extension = input_filename_without_extension[:(59 - len(output_extension))] output_filename = output_filename_without_extension + '.' + output_extension rendition, created = self.renditions.get_or_create( filter=filter, focal_point_key=cache_key, defaults={'file': File(generated_image.f, name=output_filename)} ) return rendition def is_portrait(self): return (self.width < self.height) def is_landscape(self): return (self.height < self.width) @property def filename(self): return os.path.basename(self.file.name) @property def default_alt_text(self): # by default the alt text field (used in rich text insertion) is populated # from the title. Subclasses might provide a separate alt field, and # override this return self.title def is_editable_by_user(self, user): from wagtail.wagtailimages.permissions import permission_policy return permission_policy.user_has_permission_for_instance(user, 'change', self) class Meta: abstract = True class Image(AbstractImage): admin_form_fields = ( 'title', 'file', 'collection', 'tags', 'focal_point_x', 'focal_point_y', 'focal_point_width', 'focal_point_height', ) # Do smartcropping calculations when user saves an image without a focal point @receiver(pre_save, sender=Image) def image_feature_detection(sender, instance, **kwargs): if getattr(settings, 'WAGTAILIMAGES_FEATURE_DETECTION_ENABLED', False): # Make sure the image doesn't already have a focal point if not instance.has_focal_point(): # Set the focal point instance.set_focal_point(instance.get_suggested_focal_point()) # Receive the pre_delete signal and delete the file associated with the model instance. 
@receiver(pre_delete, sender=Image) def image_delete(sender, instance, **kwargs): # Pass false so FileField doesn't save the model. instance.file.delete(False) def get_image_model(): from django.conf import settings from django.apps import apps try: app_label, model_name = settings.WAGTAILIMAGES_IMAGE_MODEL.split('.') except AttributeError: return Image except ValueError: raise ImproperlyConfigured("WAGTAILIMAGES_IMAGE_MODEL must be of the form 'app_label.model_name'") image_model = apps.get_model(app_label, model_name) if image_model is None: raise ImproperlyConfigured( "WAGTAILIMAGES_IMAGE_MODEL refers to model '%s' that has not been installed" % settings.WAGTAILIMAGES_IMAGE_MODEL ) return image_model class Filter(models.Model): """ Represents one or more operations that can be applied to an Image to produce a rendition appropriate for final display on the website. Usually this would be a resize operation, but could potentially involve colour processing, etc. """ # The spec pattern is operation1-var1-var2|operation2-var1 spec = models.CharField(max_length=255, unique=True) @cached_property def operations(self): # Search for operations self._search_for_operations() # Build list of operation objects operations = [] for op_spec in self.spec.split('|'): op_spec_parts = op_spec.split('-') if op_spec_parts[0] not in self._registered_operations: raise InvalidFilterSpecError("Unrecognised operation: %s" % op_spec_parts[0]) op_class = self._registered_operations[op_spec_parts[0]] operations.append(op_class(*op_spec_parts)) return operations def run(self, image, output): with image.get_willow_image() as willow: original_format = willow.format_name # Fix orientation of image willow = willow.auto_orient() for operation in self.operations: willow = operation.run(willow, image) or willow if original_format == 'jpeg': # Allow changing of JPEG compression quality if hasattr(settings, 'WAGTAILIMAGES_JPEG_QUALITY'): quality = settings.WAGTAILIMAGES_JPEG_QUALITY else: quality = 85 
return willow.save_as_jpeg(output, quality=quality) elif original_format == 'gif': # Convert image to PNG if it's not animated if not willow.has_animation(): return willow.save_as_png(output) else: return willow.save_as_gif(output) elif original_format == 'bmp': # Convert to PNG return willow.save_as_png(output) else: return willow.save(original_format, output) def get_cache_key(self, image): vary_parts = [] for operation in self.operations: for field in getattr(operation, 'vary_fields', []): value = getattr(image, field, '') vary_parts.append(str(value)) vary_string = '-'.join(vary_parts) # Return blank string if there are no vary fields if not vary_string: return '' return hashlib.sha1(vary_string.encode('utf-8')).hexdigest()[:8] _registered_operations = None @classmethod def _search_for_operations(cls): if cls._registered_operations is not None: return operations = [] for fn in hooks.get_hooks('register_image_operations'): operations.extend(fn()) cls._registered_operations = dict(operations) class AbstractRendition(models.Model): filter = models.ForeignKey(Filter, related_name='+') file = models.ImageField(upload_to=get_rendition_upload_to, width_field='width', height_field='height') width = models.IntegerField(editable=False) height = models.IntegerField(editable=False) focal_point_key = models.CharField(max_length=255, blank=True, default='', editable=False) @property def url(self): return self.file.url @property def alt(self): return self.image.title @property def attrs(self): """ The src, width, height, and alt attributes for an <img> tag, as a HTML string """ return flatatt(self.attrs_dict) @property def attrs_dict(self): """ A dict of the src, width, height, and alt attributes for an <img> tag. 
""" return OrderedDict([ ('src', self.url), ('width', self.width), ('height', self.height), ('alt', self.alt), ]) def img_tag(self, extra_attributes={}): attrs = self.attrs_dict.copy() attrs.update(extra_attributes) return mark_safe('<img{}>'.format(flatatt(attrs))) def __html__(self): return self.img_tag() def get_upload_to(self, filename): folder_name = 'images' filename = self.file.field.storage.get_valid_name(filename) return os.path.join(folder_name, filename) class Meta: abstract = True class Rendition(AbstractRendition): image = models.ForeignKey(Image, related_name='renditions') class Meta: unique_together = ( ('image', 'filter', 'focal_point_key'), ) # Receive the pre_delete signal and delete the file associated with the model instance. @receiver(pre_delete, sender=Rendition) def rendition_delete(sender, instance, **kwargs): # Pass false so FileField doesn't save the model. instance.file.delete(False)
bsd-3-clause
-9,143,806,606,278,823,000
33.402299
111
0.622452
false
4.159833
false
false
false
hehongliang/tensorflow
tensorflow/python/keras/optimizer_v2/ftrl_test.py
1
17276
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for Ftrl operations.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.keras.optimizer_v2 import ftrl from tensorflow.python.ops import embedding_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import adagrad from tensorflow.python.training import gradient_descent class FtrlOptimizerTest(test.TestCase): def doTestFtrlwithoutRegularization(self, use_resource=False): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: if use_resource: var0 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype) var1 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype) else: var0 = variables.Variable([0.0, 0.0], dtype=dtype) var1 = variables.Variable([0.0, 0.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) opt = ftrl.Ftrl( 3.0, 
initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllClose([0.0, 0.0], v0_val) self.assertAllClose([0.0, 0.0], v1_val) # Run 3 steps FTRL for _ in range(3): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType( np.array([-2.60260963, -4.29698515]), v0_val) self.assertAllCloseAccordingToType( np.array([-0.28432083, -0.56694895]), v1_val) def testFtrlWithoutRegularization(self): self.doTestFtrlwithoutRegularization(use_resource=False) def testResourceFtrlWithoutRegularization(self): self.doTestFtrlwithoutRegularization(use_resource=True) def testFtrlwithoutRegularization2(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([4.0, 3.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) opt = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([1.0, 2.0], v0_val) self.assertAllCloseAccordingToType([4.0, 3.0], v1_val) # Run 3 steps FTRL for _ in range(3): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType( np.array([-2.55607247, -3.98729396]), v0_val) self.assertAllCloseAccordingToType( np.array([-0.28232238, -0.56096673]), v1_val) def testMinimizeSparseResourceVariable(self): for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: with self.cached_session(): var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype) x = constant_op.constant([[4.0], 
[5.0]], dtype=dtype) pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) loss = pred * pred sgd_op = ftrl.Ftrl(1.0).minimize(loss, var_list=[var0]) variables.global_variables_initializer().run() # Fetch params to validate initial values self.assertAllCloseAccordingToType([[1.0, 2.0]], self.evaluate(var0)) # Run 1 step of sgd sgd_op.run() # Validate updated params self.assertAllCloseAccordingToType([[0, 1]], self.evaluate(var0), atol=0.01) def testFtrlWithL1(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([4.0, 3.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) opt = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=0.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([1.0, 2.0], v0_val) self.assertAllCloseAccordingToType([4.0, 3.0], v1_val) # Run 10 steps FTRL for _ in range(10): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType( np.array([-7.66718769, -10.91273689]), v0_val) self.assertAllCloseAccordingToType( np.array([-0.93460727, -1.86147261]), v1_val) def testFtrlWithL1_L2(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([4.0, 3.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) opt = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=2.0) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = 
sess.run([var0, var1]) self.assertAllCloseAccordingToType([1.0, 2.0], v0_val) self.assertAllCloseAccordingToType([4.0, 3.0], v1_val) # Run 10 steps FTRL for _ in range(10): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType( np.array([-0.24059935, -0.46829352]), v0_val) self.assertAllCloseAccordingToType( np.array([-0.02406147, -0.04830509]), v1_val) def testFtrlWithL1_L2_L2Shrinkage(self): """Test the new FTRL op with support for l2 shrinkage. The addition of this parameter which places a constant pressure on weights towards the origin causes the gradient descent trajectory to differ. The weights will tend to have smaller magnitudes with this parameter set. """ for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([4.0, 3.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) opt = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=2.0, l2_shrinkage_regularization_strength=0.1) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([1.0, 2.0], v0_val) self.assertAllCloseAccordingToType([4.0, 3.0], v1_val) # Run 10 steps FTRL for _ in range(10): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType( np.array([-0.22578995, -0.44345796]), v0_val) self.assertAllCloseAccordingToType( np.array([-0.14378493, -0.13229476]), v1_val) def testFtrlWithL1_L2_L2ShrinkageSparse(self): """Tests the new FTRL op with support for l2 shrinkage on sparse grads.""" for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([[1.0], [2.0]], dtype=dtype) var1 = variables.Variable([[4.0], [3.0]], dtype=dtype) 
grads0 = ops.IndexedSlices( constant_op.constant([0.1], shape=[1, 1], dtype=dtype), constant_op.constant([0]), constant_op.constant([2, 1])) grads1 = ops.IndexedSlices( constant_op.constant([0.02], shape=[1, 1], dtype=dtype), constant_op.constant([1]), constant_op.constant([2, 1])) opt = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=2.0, l2_shrinkage_regularization_strength=0.1) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([[1.0], [2.0]], v0_val) self.assertAllCloseAccordingToType([[4.0], [3.0]], v1_val) # Run 10 steps FTRL for _ in range(10): update.run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([[-0.22578995], [2.]], v0_val) self.assertAllCloseAccordingToType([[4.], [-0.13229476]], v1_val) def testFtrlWithL2ShrinkageDoesNotChangeLrSchedule(self): """Verifies that l2 shrinkage in FTRL does not change lr schedule.""" for dtype in [dtypes.half, dtypes.float32]: with self.cached_session() as sess: var0 = variables.Variable([1.0, 2.0], dtype=dtype) var1 = variables.Variable([1.0, 2.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.1, 0.2], dtype=dtype) opt0 = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=2.0, l2_shrinkage_regularization_strength=0.1) opt1 = ftrl.Ftrl( 3.0, initial_accumulator_value=0.1, l1_regularization_strength=0.001, l2_regularization_strength=2.0) update0 = opt0.apply_gradients([(grads0, var0)]) update1 = opt1.apply_gradients([(grads1, var1)]) variables.global_variables_initializer().run() v0_val, v1_val = sess.run([var0, var1]) self.assertAllCloseAccordingToType([1.0, 2.0], v0_val) self.assertAllCloseAccordingToType([1.0, 2.0], v1_val) # Run 10 steps FTRL for _ in range(10): update0.run() 
update1.run() v0_val, v1_val = sess.run([var0, var1]) # var0 is experiencing L2 shrinkage so it should be smaller than var1 # in magnitude. self.assertTrue((v0_val**2 < v1_val**2).all()) accum0 = sess.run(opt0.get_slot(var0, "accumulator")) accum1 = sess.run(opt1.get_slot(var1, "accumulator")) # L2 shrinkage should not change how we update grad accumulator. self.assertAllCloseAccordingToType(accum0, accum1) def applyOptimizer(self, opt, dtype, steps=5, is_sparse=False): if is_sparse: var0 = variables.Variable([[0.0], [0.0]], dtype=dtype) var1 = variables.Variable([[0.0], [0.0]], dtype=dtype) grads0 = ops.IndexedSlices( constant_op.constant([0.1], shape=[1, 1], dtype=dtype), constant_op.constant([0]), constant_op.constant([2, 1])) grads1 = ops.IndexedSlices( constant_op.constant([0.02], shape=[1, 1], dtype=dtype), constant_op.constant([1]), constant_op.constant([2, 1])) else: var0 = variables.Variable([0.0, 0.0], dtype=dtype) var1 = variables.Variable([0.0, 0.0], dtype=dtype) grads0 = constant_op.constant([0.1, 0.2], dtype=dtype) grads1 = constant_op.constant([0.01, 0.02], dtype=dtype) update = opt.apply_gradients(zip([grads0, grads1], [var0, var1])) variables.global_variables_initializer().run() sess = ops.get_default_session() v0_val, v1_val = sess.run([var0, var1]) if is_sparse: self.assertAllCloseAccordingToType([[0.0], [0.0]], v0_val) self.assertAllCloseAccordingToType([[0.0], [0.0]], v1_val) else: self.assertAllCloseAccordingToType([0.0, 0.0], v0_val) self.assertAllCloseAccordingToType([0.0, 0.0], v1_val) # Run Ftrl for a few steps for _ in range(steps): update.run() v0_val, v1_val = sess.run([var0, var1]) return v0_val, v1_val # When variables are initialized with Zero, FTRL-Proximal has two properties: # 1. Without L1&L2 but with fixed learning rate, FTRL-Proximal is identical # with GradientDescent. # 2. Without L1&L2 but with adaptive learning rate, FTRL-Proximal is identical # with Adagrad. 
# So, basing on these two properties, we test if our implementation of # FTRL-Proximal performs same updates as Adagrad or GradientDescent. def testEquivAdagradwithoutRegularization(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session(): val0, val1 = self.applyOptimizer( ftrl.Ftrl( 3.0, # Adagrad learning rate learning_rate_power=-0.5, initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0), dtype) with self.cached_session(): val2, val3 = self.applyOptimizer( adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1), dtype) self.assertAllCloseAccordingToType(val0, val2) self.assertAllCloseAccordingToType(val1, val3) def testEquivSparseAdagradwithoutRegularization(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session(): val0, val1 = self.applyOptimizer( ftrl.Ftrl( 3.0, # Adagrad learning rate learning_rate_power=-0.5, initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0), dtype, is_sparse=True) with self.cached_session(): val2, val3 = self.applyOptimizer( adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1), dtype, is_sparse=True) self.assertAllCloseAccordingToType(val0, val2) self.assertAllCloseAccordingToType(val1, val3) def testEquivSparseGradientDescentwithoutRegularization(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session(): val0, val1 = self.applyOptimizer( ftrl.Ftrl( 3.0, # Fixed learning rate learning_rate_power=-0.0, initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0), dtype, is_sparse=True) with self.cached_session(): val2, val3 = self.applyOptimizer( gradient_descent.GradientDescentOptimizer(3.0), dtype, is_sparse=True) self.assertAllCloseAccordingToType(val0, val2) self.assertAllCloseAccordingToType(val1, val3) def testEquivGradientDescentwithoutRegularization(self): for dtype in [dtypes.half, dtypes.float32]: with self.cached_session(): val0, val1 = 
self.applyOptimizer( ftrl.Ftrl( 3.0, # Fixed learning rate learning_rate_power=-0.0, initial_accumulator_value=0.1, l1_regularization_strength=0.0, l2_regularization_strength=0.0), dtype) with self.cached_session(): val2, val3 = self.applyOptimizer( gradient_descent.GradientDescentOptimizer(3.0), dtype) self.assertAllCloseAccordingToType(val0, val2) self.assertAllCloseAccordingToType(val1, val3) if __name__ == "__main__": test.main()
apache-2.0
2,758,639,719,121,693,000
39.553991
80
0.620167
false
3.507817
true
false
false
maxtangli/sonico
language/python/teabreak/final_hint.py
1
1056
def intelligent_data_source_factory(*data): import itertools cy = itertools.cycle(data) _int = int return lambda i: _int(i) if isinstance(i, str) else next(cy) int = intelligent_data_source_factory(1985, 33067, 84) # int = intelligent_data_source_factory(2012, 9, 30) # invalid # int = intelligent_data_source_factory(2012, 9, 16) # invalid # int = intelligent_data_source_factory(84, 100, 114) # invalid def range_check(func): return lambda m, e, n, c: ((0 <= m < n) and func(m, e, n, c)) or '' @range_check def f(m, e, n, c): return str(m) if pow(m, e) % n == c else '' if __name__ == '__main__': # for i in range(1000000): # # if f(i, 17, 3569, 915) == str(i): # if f(i, 1985, 33067, 84) == str(i): # print(i) # 25202 # # print(25202 % 1985, 25202 % 33067, 25202 % 84) # invalid # print(25202 % 17, 25202 % 3569, 25202 % 915) # invalid for i in range(1000000): if f(i, int(17), int(3569), int(915)) == str(i): print(i) # 25202 -> 20252(invalid)
mit
7,963,685,116,978,258,000
28.333333
71
0.571023
false
2.735751
false
false
false
mory0tiki/pack-llama
views.py
1
1220
from django.core.files.base import ContentFile from django.shortcuts import render from django.http.response import HttpResponse from django.views.generic import base from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.conf import settings import ast import json import uuid import models import utils class SavePackView(base.View): @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): return super(SavePackView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): try: result = {"result" : False} if request.body: pack = models.Pack() pack.receive_from = request.META["REMOTE_ADDR"] pack.queue_id = models.Queue.objects.get(name='Test').id if settings.DEBUG: print request.body print "++++++++++++++++++++++++" pack.message.save(str(uuid.uuid4()),ContentFile(request.body)) result["result"] = True except Exception as ex: print str(ex) return HttpResponse(json.dumps(result))
apache-2.0
5,642,803,948,910,530,000
32.888889
78
0.62623
false
4.341637
false
false
false
JackCloudman/Youtube-music
download.py
1
1342
#Program to download Yotube music #Author: Jack Cloudman import pafy,os,shutil from pydub import AudioSegment as convert #Create song list if os.path.exists('songs.txt'): pass else: print("Creating songs.txt....") document= open('songs.txt','w') print("Paste yours songs in songs.txt") document.close() #create directory if os.path.exists('music'): if os.path.exists('music/temp'): pass else: os.mkdir('music/temp') else: os.mkdir('music') os.mkdir('music/temp') document = open('songs.txt','r') music_list = document.readlines() document.close() error_list=[] print("Download music....") for music in music_list: try: url = music video = pafy.new(url) bestaudio = video.getbestaudio() bestaudio.download(filepath="music/temp/") except: error_list.append("Error download: "+music) print("Converting to mp3.....") for filename in os.listdir('music/temp/'): try: audio = convert.from_file('music/temp/'+filename) name = os.path.splitext(filename) audio.export('music/'+name[0]+'.mp3',format="mp3",bitrate="160k") except: error_list.append("Error convert: "+name[0]) shutil.rmtree("music/temp") for error in error_list: print(error) print("Finished!")
gpl-3.0
-262,824,265,426,975,650
26.553191
73
0.622206
false
3.321782
false
false
false
jfalkner/Efficient-Django-QuerySet-Use
demo-optimized/example/utils.py
1
3812
from django.utils.timezone import utc from django_db_utils import pg_bulk_update from example.models import Sample, SampleStatus def now(): from datetime import datetime return datetime.utcnow().replace(tzinfo=utc) def make_fake_data(samples_to_make=100000, batch_threshold=100000, delete_existing=True, make_statuses=True, years=5): """Makes mock data for testing performance. Optionally, resets db. """ if delete_existing: Sample.objects.all().delete() print "Deleted existing" # Make up a set of offset = samples_to_make - samples_to_make/52/years # Create all the samples. samples = [] barcodes = range(samples_to_make) for barcode in barcodes: sample = Sample() sample.barcode = str(barcode) sample.created = now() sample.status_created = sample.created if barcode < offset: sample.status_code = SampleStatus.COMPLETE else: sample.status_code = SampleStatus.LAB sample.production = True samples.append(sample) if len(samples) >= batch_threshold: Sample.objects.bulk_create(samples) del samples[:] print "Made %s samples." % Sample.objects.count() if samples: Sample.objects.bulk_create(samples) print "Finished making %s samples." % Sample.objects.count() if not make_statuses: return # Pull all ids for samples. sample_ids = Sample.objects.values_list('id', flat=True) # Create all the statuses. 
offset = len(sample_ids)-len(sample_ids)/52/years statuses = [] for sample in sample_ids[:offset]: statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now())) statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now())) statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.COMPLETE, created=now())) if len(statuses) >= batch_threshold: SampleStatus.objects.bulk_create(statuses) del statuses[:] for sample in sample_ids[offset:]: statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now())) statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now())) if len(statuses) >= batch_threshold: SampleStatus.objects.bulk_create(statuses) del statuses[:] print "Made %s statuses."%SampleStatus.objects.count() if statuses: SampleStatus.objects.bulk_create(statuses) print "Finished making %s statuses."%SampleStatus.objects.count() # Make all the denormalized status_code vars match. sync_status(limit=batch_threshold) print "Statuses synchronized" def sync_status(limit=100000): # Stream through all samples. sample_count = Sample.objects.count() for index in range(0, sample_count, limit): vals = Sample.objects.order_by('id', '-statuses__status_code').distinct('id').values_list('id', 'status_code', 'statuses__id', 'statuses__status_code')[index:index+limit] # Pull all mismatching values. ids = [] status_codes = [] # status_ids = [] for sample_id, status_code, status_id, latest_status_code in vals: if status_code != latest_status_code: ids.append(sample_id) status_codes.append(latest_status_code) # status_ids.append(status_id) # Sync using a bulk update. if ids: pg_bulk_update(Sample, 'id', 'status_code', list(ids), list(status_codes)) # pg_bulk_update(Sample, 'id', 'status_id', list(ids), list(status_ids)) print 'Synced %s out of %s samples at %s'%(len(ids), limit, index)
mit
361,137,351,912,115,300
39.126316
178
0.647692
false
3.909744
false
false
false
cloudbase/coriolis
coriolis/wsman.py
1
6173
# Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. import base64 from oslo_log import log as logging import requests from winrm import protocol from winrm import exceptions as winrm_exceptions from coriolis import exception from coriolis import utils AUTH_BASIC = "basic" AUTH_KERBEROS = "kerberos" AUTH_CERTIFICATE = "certificate" CODEPAGE_UTF8 = 65001 DEFAULT_TIMEOUT = 3600 LOG = logging.getLogger(__name__) class WSManConnection(object): def __init__(self, timeout=None): self._protocol = None self._conn_timeout = int(timeout or DEFAULT_TIMEOUT) EOL = "\r\n" @utils.retry_on_error() def connect(self, url, username, auth=None, password=None, cert_pem=None, cert_key_pem=None): if not auth: if cert_pem: auth = AUTH_CERTIFICATE else: auth = AUTH_BASIC auth_transport_map = {AUTH_BASIC: 'plaintext', AUTH_KERBEROS: 'kerberos', AUTH_CERTIFICATE: 'ssl'} self._protocol = protocol.Protocol( endpoint=url, transport=auth_transport_map[auth], username=username, password=password, cert_pem=cert_pem, cert_key_pem=cert_key_pem) @classmethod def from_connection_info(cls, connection_info, timeout=DEFAULT_TIMEOUT): """ Returns a wsman.WSManConnection object for the provided conn info. """ if not isinstance(connection_info, dict): raise ValueError( "WSMan connection must be a dict. Got type '%s', value: %s" % ( type(connection_info), connection_info)) required_keys = ["ip", "username", "password"] missing = [key for key in required_keys if key not in connection_info] if missing: raise ValueError( "The following keys were missing from WSMan connection info %s. 
" "Got: %s" % (missing, connection_info)) host = connection_info["ip"] port = connection_info.get("port", 5986) username = connection_info["username"] password = connection_info.get("password") cert_pem = connection_info.get("cert_pem") cert_key_pem = connection_info.get("cert_key_pem") url = "https://%s:%s/wsman" % (host, port) LOG.info("Connection info: %s", str(connection_info)) LOG.info("Waiting for connectivity on host: %(host)s:%(port)s", {"host": host, "port": port}) utils.wait_for_port_connectivity(host, port) conn = cls(timeout) conn.connect(url=url, username=username, password=password, cert_pem=cert_pem, cert_key_pem=cert_key_pem) return conn def disconnect(self): self._protocol = None def set_timeout(self, timeout): if timeout: self._protocol.timeout = timeout self._protocol.transport.timeout = timeout @utils.retry_on_error( terminal_exceptions=[winrm_exceptions.InvalidCredentialsError, exception.OSMorphingWinRMOperationTimeout]) def _exec_command(self, cmd, args=[], timeout=None): timeout = int(timeout or self._conn_timeout) self.set_timeout(timeout) shell_id = self._protocol.open_shell(codepage=CODEPAGE_UTF8) try: command_id = self._protocol.run_command(shell_id, cmd, args) try: (std_out, std_err, exit_code) = self._protocol.get_command_output( shell_id, command_id) except requests.exceptions.ReadTimeout: raise exception.OSMorphingWinRMOperationTimeout( cmd=("%s %s" % (cmd, " ".join(args))), timeout=timeout) finally: self._protocol.cleanup_command(shell_id, command_id) return (std_out, std_err, exit_code) finally: self._protocol.close_shell(shell_id) def exec_command(self, cmd, args=[], timeout=None): LOG.debug("Executing WSMAN command: %s", str([cmd] + args)) std_out, std_err, exit_code = self._exec_command( cmd, args, timeout=timeout) if exit_code: raise exception.CoriolisException( "Command \"%s\" failed with exit code: %s\n" "stdout: %s\nstd_err: %s" % (str([cmd] + args), exit_code, std_out, std_err)) return std_out def exec_ps_command(self, cmd, 
ignore_stdout=False, timeout=None): LOG.debug("Executing PS command: %s", cmd) base64_cmd = base64.b64encode(cmd.encode('utf-16le')).decode() return self.exec_command( "powershell.exe", ["-EncodedCommand", base64_cmd], timeout=timeout)[:-2] def test_path(self, remote_path): ret_val = self.exec_ps_command("Test-Path -Path \"%s\"" % remote_path) return ret_val == "True" def download_file(self, url, remote_path): LOG.debug("Downloading: \"%(url)s\" to \"%(path)s\"", {"url": url, "path": remote_path}) # Nano Server does not have Invoke-WebRequest and additionally # this is also faster self.exec_ps_command( "[Net.ServicePointManager]::SecurityProtocol = " "[Net.SecurityProtocolType]::Tls12;" "if(!([System.Management.Automation.PSTypeName]'" "System.Net.Http.HttpClient').Type) {$assembly = " "[System.Reflection.Assembly]::LoadWithPartialName(" "'System.Net.Http')}; (new-object System.Net.Http.HttpClient)." "GetStreamAsync('%(url)s').Result.CopyTo(" "(New-Object IO.FileStream '%(outfile)s', Create, Write, None), " "1MB)" % {"url": url, "outfile": remote_path}, ignore_stdout=True) def write_file(self, remote_path, content): self.exec_ps_command( "[IO.File]::WriteAllBytes('%s', [Convert]::FromBase64String('%s'))" % (remote_path, base64.b64encode(content).decode()), ignore_stdout=True)
agpl-3.0
-159,369,638,874,737,570
36.640244
82
0.581403
false
3.872647
false
false
false
kirienko/gourmet
src/gourmet/importers/plaintext_importer.py
1
4803
import re from gourmet import check_encodings from gourmet.gdebug import debug from gourmet.i18n import _ from gourmet.importers import importer class TextImporter (importer.Importer): ATTR_DICT = {'Recipe By':'source', 'Serving Size':'servings', 'Preparation Time':'preptime', 'Categories':'category', } end_of_paragraph_length = 60 def __init__ (self, filename, conv=None): self.fn = filename self.rec = {} self.ing = {} self.compile_regexps() importer.Importer.__init__(self,conv=conv) def pre_run (self): self.lines = check_encodings.get_file(self.fn) self.total_lines = len(self.lines) print('we have ',self.total_lines,'lines in file',self.fn) def do_run (self): if not hasattr(self,'lines'): raise Exception("pre_run has not been run!") for n in range(self.total_lines): l=self.lines[n] if n % 15 == 0: prog = float(n)/float(self.total_lines) msg = _("Imported %s recipes.")%(len(self.added_recs)) self.emit('progress',prog,msg) self.handle_line(l) # commit the last rec if need be if self.rec: self.commit_rec() importer.Importer.do_run(self) def handle_line (self, l): raise NotImplementedError def compile_regexps (self): self.blank_matcher = re.compile(r"^\s*$") # out unwrap regexp looks for a line with no meaningful characters, or a line that starts in # ALLCAPS or a line that is only space. (we use this with .split() to break text up into # paragraph breaks. 
self.unwrap_matcher = re.compile(r'\n\W*\n') self.find_header_breaks_matcher = re.compile(r'\s+(?=[A-Z][A-Z][A-Z]+:.*)') def unwrap_lines (self, blob): if blob.find("") >= 0: debug('Using built-in paragraph markers',1) # then we have paragraph markers in the text already outblob = " ".join(blob.split("\n")) # get rid of line breaks lines = outblob.split("") # split text up into paragraphs outblob = "\n".join(lines) # insert linebreaks where paragraphs were return outblob outblob = "" newline = True for l in blob.split('\n'): debug('examining %s'%l,3) if re.match(r'^\W*$',l): # ignore repeated nonword characters (hyphens, stars, etc.) outblob += "\n" continue # if we have a non-word character at the start of the line, # we assume we need to keep the newline. if len(l)>=3 and re.match(r'(\W|[0-9])',l[2]): debug('Match non-word character; add newline before: %s'%l,4) outblob += "\n" outblob += l newline = False continue # if we are continuing an old line, we add a space # (because we're generally stripping all spaces when # we write) if not newline: outblob += " " hmatch = self.find_header_breaks_matcher.search(l) if hmatch: # if there's a header in the middle, we go ahead # and start a new line debug('Splitting at header in line: %s'%l,4) outblob += l[:hmatch.start()] outblob += "\n" outblob += l[hmatch.start():] continue #else... outblob += l.strip() if len(l) < self.end_of_paragraph_length: #60 is our hard-coded end-o-paragraph length debug('line < %s characters, adding newline.'%self.end_of_paragraph_length,4) outblob += "\n" newline = True else: newline = False return outblob class Tester (importer.Tester): def __init__ (self): importer.Tester.__init__(self,regexp=MASTERCOOK_START_REGEXP) self.not_me = "<[?]?(xml|mx2|RcpE|RTxt)[^>]*>" def test (self, filename): """Test file named filename. filename can also be a file object. 
""" if not hasattr(self,'matcher'): self.matcher=re.compile(self.regexp) self.not_matcher = re.compile(self.not_me) if isinstance(self.ofi, str): self.ofi = open(filename,'r') l = self.ofi.readline() while l: if self.not_matcher.match(l): self.ofi.close() return False if self.matcher.match(l): self.ofi.close() return True l = self.ofi.readline() self.ofi.close()
gpl-2.0
-4,688,046,965,670,273,000
36.818898
100
0.531335
false
3.940115
false
false
false
mark-me/Pi-Jukebox
venv/Lib/site-packages/pygame/ftfont.py
1
6239
"""pygame module for loading and rendering fonts (freetype alternative)""" __all__ = ['Font', 'init', 'quit', 'get_default_font', 'get_init', 'SysFont'] from pygame._freetype import init, Font as _Font, get_default_resolution from pygame._freetype import quit, get_default_font, get_init as _get_init from pygame._freetype import __PYGAMEinit__ from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont from pygame import encode_file_path from pygame.compat import bytes_, unicode_, as_unicode, as_bytes from pygame import Surface as _Surface, Color as _Color, SRCALPHA as _SRCALPHA class Font(_Font): """Font(filename, size) -> Font Font(object, size) -> Font create a new Font object from a file (freetype alternative) This Font type differs from font.Font in that it can render glyphs for Unicode code points in the supplementary planes (> 0xFFFF). """ __encode_file_path = staticmethod(encode_file_path) __get_default_resolution = staticmethod(get_default_resolution) __default_font = encode_file_path(get_default_font()) __unull = as_unicode(r"\x00") __bnull = as_bytes("\x00") def __init__(self, file, size=-1): if size <= 1: size = 1 if isinstance(file, unicode_): try: bfile = self.__encode_file_path(file, ValueError) except ValueError: bfile = '' else: bfile = file if isinstance(bfile, bytes_) and bfile == self.__default_font: file = None if file is None: resolution = int(self.__get_default_resolution() * 0.6875) if resolution == 0: kwds['resolution'] = 1 else: resolution = 0 super(Font, self).__init__(file, size=size, resolution=resolution) self.strength = 1.0 / 12.0 self.kerning = False self.origin = True self.pad = True self.ucs4 = True self.underline_adjustment = 1.0 def render(self, text, antialias, color, background=None): """render(text, antialias, color, background=None) -> Surface draw text on a new Surface""" if text is None: text = "" if (isinstance(text, unicode_) and # conditional and self.__unull in text): raise ValueError("A null character was 
found in the text") if (isinstance(text, bytes_) and # conditional and self.__bnull in text): raise ValueError("A null character was found in the text") save_antialiased = self.antialiased self.antialiased = bool(antialias) try: s, r = super(Font, self).render(text, color, background) return s finally: self.antialiased = save_antialiased def set_bold(self, value): """set_bold(bool) -> None enable fake rendering of bold text""" self.wide = bool(value) def get_bold(self): """get_bold() -> bool check if text will be rendered bold""" return self.wide def set_italic(self, value): """set_italic(bool) -> None enable fake rendering of italic text""" self.oblique = bool(value) def get_italic(self): """get_italic() -> bool check if the text will be rendered italic""" return self.oblique def set_underline(self, value): """set_underline(bool) -> None control if text is rendered with an underline""" self.underline = bool(value) def get_underline(self): """set_bold(bool) -> None enable fake rendering of bold text""" return self.underline def metrics(self, text): """metrics(text) -> list Gets the metrics for each character in the pased string.""" return self.get_metrics(text) def get_ascent(self): """get_ascent() -> int get the ascent of the font""" return self.get_sized_ascender() def get_descent(self): """get_descent() -> int get the descent of the font""" return self.get_sized_descender() def get_height(self): """get_height() -> int get the height of the font""" return self.get_sized_ascender() - self.get_sized_descender() + 1 def get_linesize(self): """get_linesize() -> int get the line space of the font text""" return self.get_sized_height(); def size(self, text): """size(text) -> (width, height) determine the amount of space needed to render text""" return self.get_rect(text).size FontType = Font def get_init(): """get_init() -> bool true if the font module is initialized""" return _get_init() def SysFont(name, size, bold=0, italic=0, constructor=None): 
"""pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font create a pygame Font from system font resources (freetype alternative) This will search the system fonts for the given font name. You can also enable bold or italic styles, and the appropriate system font will be selected if available. This will always return a valid Font object, and will fallback on the builtin pygame font if the given font is not found. Name can also be a comma separated list of names, in which case set of names will be searched in order. Pygame uses a small set of common font aliases, if the specific font you ask for is not available, a reasonable alternative may be used. if optional contructor is provided, it must be a function with signature constructor(fontpath, size, bold, italic) which returns a Font instance. If None, a pygame.ftfont.Font object is created. """ if constructor is None: def constructor(fontpath, size, bold, italic): font = Font(fontpath, size) font.set_bold(bold) font.set_italic(italic) return font return _SysFont(name, size, bold, italic, constructor) del _Font, get_default_resolution, encode_file_path, as_unicode, as_bytes
agpl-3.0
7,635,080,894,809,608,000
32.363636
92
0.608431
false
4.056567
false
false
false
google/ffn
ffn/utils/vector_pb2.py
1
15524
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # Generated by the protocol buffer compiler. DO NOT EDIT! # source: utils/vector.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='utils/vector.proto', package='ffn.proto', syntax='proto2', serialized_pb=_b('\n\x12utils/vector.proto\x12\tffn.proto\" \n\x08Vector2d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\" \n\x08Vector2i\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05\"+\n\x08Vector3d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\"+\n\x08Vector3f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\"+\n\x08Vector3j\x12\t\n\x01x\x18\x01 \x01(\x03\x12\t\n\x01y\x18\x02 \x01(\x03\x12\t\n\x01z\x18\x03 \x01(\x03\"4\n\x0cVector2dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector2d\"4\n\x0cVector2iList\x12$\n\x07vectors\x18\x01 
\x03(\x0b\x32\x13.ffn.proto.Vector2i\"4\n\x0cVector3dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3d\"4\n\x0cVector3fList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3f\"4\n\x0cVector3jList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3j') ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _VECTOR2D = _descriptor.Descriptor( name='Vector2d', full_name='ffn.proto.Vector2d', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='x', full_name='ffn.proto.Vector2d.x', index=0, number=1, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='y', full_name='ffn.proto.Vector2d.y', index=1, number=2, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=33, serialized_end=65, ) _VECTOR2I = _descriptor.Descriptor( name='Vector2i', full_name='ffn.proto.Vector2i', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='x', full_name='ffn.proto.Vector2i.x', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='y', full_name='ffn.proto.Vector2i.y', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, 
is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=67, serialized_end=99, ) _VECTOR3D = _descriptor.Descriptor( name='Vector3d', full_name='ffn.proto.Vector3d', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='x', full_name='ffn.proto.Vector3d.x', index=0, number=1, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='y', full_name='ffn.proto.Vector3d.y', index=1, number=2, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='z', full_name='ffn.proto.Vector3d.z', index=2, number=3, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=101, serialized_end=144, ) _VECTOR3F = _descriptor.Descriptor( name='Vector3f', full_name='ffn.proto.Vector3f', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='x', full_name='ffn.proto.Vector3f.x', index=0, number=1, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='y', full_name='ffn.proto.Vector3f.y', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
options=None), _descriptor.FieldDescriptor( name='z', full_name='ffn.proto.Vector3f.z', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=146, serialized_end=189, ) _VECTOR3J = _descriptor.Descriptor( name='Vector3j', full_name='ffn.proto.Vector3j', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='x', full_name='ffn.proto.Vector3j.x', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='y', full_name='ffn.proto.Vector3j.y', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='z', full_name='ffn.proto.Vector3j.z', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=191, serialized_end=234, ) _VECTOR2DLIST = _descriptor.Descriptor( name='Vector2dList', full_name='ffn.proto.Vector2dList', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='vectors', full_name='ffn.proto.Vector2dList.vectors', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=236, serialized_end=288, ) _VECTOR2ILIST = _descriptor.Descriptor( name='Vector2iList', full_name='ffn.proto.Vector2iList', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='vectors', full_name='ffn.proto.Vector2iList.vectors', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=290, serialized_end=342, ) _VECTOR3DLIST = _descriptor.Descriptor( name='Vector3dList', full_name='ffn.proto.Vector3dList', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='vectors', full_name='ffn.proto.Vector3dList.vectors', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=344, serialized_end=396, ) _VECTOR3FLIST = _descriptor.Descriptor( name='Vector3fList', full_name='ffn.proto.Vector3fList', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='vectors', full_name='ffn.proto.Vector3fList.vectors', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=398, serialized_end=450, ) _VECTOR3JLIST = _descriptor.Descriptor( name='Vector3jList', full_name='ffn.proto.Vector3jList', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='vectors', full_name='ffn.proto.Vector3jList.vectors', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=452, serialized_end=504, ) _VECTOR2DLIST.fields_by_name['vectors'].message_type = _VECTOR2D _VECTOR2ILIST.fields_by_name['vectors'].message_type = _VECTOR2I _VECTOR3DLIST.fields_by_name['vectors'].message_type = _VECTOR3D _VECTOR3FLIST.fields_by_name['vectors'].message_type = _VECTOR3F _VECTOR3JLIST.fields_by_name['vectors'].message_type = _VECTOR3J DESCRIPTOR.message_types_by_name['Vector2d'] = _VECTOR2D DESCRIPTOR.message_types_by_name['Vector2i'] = _VECTOR2I DESCRIPTOR.message_types_by_name['Vector3d'] = _VECTOR3D DESCRIPTOR.message_types_by_name['Vector3f'] = _VECTOR3F DESCRIPTOR.message_types_by_name['Vector3j'] = _VECTOR3J DESCRIPTOR.message_types_by_name['Vector2dList'] = _VECTOR2DLIST DESCRIPTOR.message_types_by_name['Vector2iList'] = _VECTOR2ILIST DESCRIPTOR.message_types_by_name['Vector3dList'] = _VECTOR3DLIST DESCRIPTOR.message_types_by_name['Vector3fList'] = _VECTOR3FLIST DESCRIPTOR.message_types_by_name['Vector3jList'] = _VECTOR3JLIST Vector2d = _reflection.GeneratedProtocolMessageType('Vector2d', (_message.Message,), dict( DESCRIPTOR = _VECTOR2D, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector2d) )) 
_sym_db.RegisterMessage(Vector2d) Vector2i = _reflection.GeneratedProtocolMessageType('Vector2i', (_message.Message,), dict( DESCRIPTOR = _VECTOR2I, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector2i) )) _sym_db.RegisterMessage(Vector2i) Vector3d = _reflection.GeneratedProtocolMessageType('Vector3d', (_message.Message,), dict( DESCRIPTOR = _VECTOR3D, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector3d) )) _sym_db.RegisterMessage(Vector3d) Vector3f = _reflection.GeneratedProtocolMessageType('Vector3f', (_message.Message,), dict( DESCRIPTOR = _VECTOR3F, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector3f) )) _sym_db.RegisterMessage(Vector3f) Vector3j = _reflection.GeneratedProtocolMessageType('Vector3j', (_message.Message,), dict( DESCRIPTOR = _VECTOR3J, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector3j) )) _sym_db.RegisterMessage(Vector3j) Vector2dList = _reflection.GeneratedProtocolMessageType('Vector2dList', (_message.Message,), dict( DESCRIPTOR = _VECTOR2DLIST, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector2dList) )) _sym_db.RegisterMessage(Vector2dList) Vector2iList = _reflection.GeneratedProtocolMessageType('Vector2iList', (_message.Message,), dict( DESCRIPTOR = _VECTOR2ILIST, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector2iList) )) _sym_db.RegisterMessage(Vector2iList) Vector3dList = _reflection.GeneratedProtocolMessageType('Vector3dList', (_message.Message,), dict( DESCRIPTOR = _VECTOR3DLIST, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector3dList) )) _sym_db.RegisterMessage(Vector3dList) Vector3fList = _reflection.GeneratedProtocolMessageType('Vector3fList', (_message.Message,), dict( DESCRIPTOR = _VECTOR3FLIST, __module__ = 'utils.vector_pb2' # 
@@protoc_insertion_point(class_scope:ffn.proto.Vector3fList) )) _sym_db.RegisterMessage(Vector3fList) Vector3jList = _reflection.GeneratedProtocolMessageType('Vector3jList', (_message.Message,), dict( DESCRIPTOR = _VECTOR3JLIST, __module__ = 'utils.vector_pb2' # @@protoc_insertion_point(class_scope:ffn.proto.Vector3jList) )) _sym_db.RegisterMessage(Vector3jList) # @@protoc_insertion_point(module_scope)
apache-2.0
395,596,101,208,389,570
30.361616
969
0.690157
false
2.99749
false
false
false
poppogbr/genropy
packages/hosting/webpages/client.py
1
8379
#!/usr/bin/env python # encoding: utf-8 """ Created by Softwell on 2008-07-10. Copyright (c) 2008 Softwell. All rights reserved. """ # --------------------------- GnrWebPage Standard header --------------------------- from gnr.core.gnrbag import Bag class GnrCustomWebPage(object): maintable = 'hosting.client' py_requires = """public:Public,standard_tables:TableHandler, gnrcomponents/selectionhandler, hosted:HostedClient,hosted:HostedInstance""" ######################## STANDARD TABLE OVERRIDDEN METHODS ############### def windowTitle(self): return '!!Client' def pageAuthTags(self, method=None, **kwargs): return 'owner' def tableWriteTags(self): return 'owner' def tableDeleteTags(self): return 'owner' def barTitle(self): return '!!Client' def lstBase(self, struct): r = struct.view().rows() r.fieldcell('code', width='10em') r.fieldcell('@user_id.username', name='User', width='10em') self.hosted_card_columns(r) return struct def conditionBase(self): pass def queryBase(self): return dict(column='code', op='contains', val='%') def orderBase(self): return 'code' ############################## FORM METHODS ################################## def formBase(self, parentBC, disabled=False, **kwargs): bc = parentBC.borderContainer(**kwargs) top = bc.borderContainer(region='top', height='120px') right = top.contentPane(region='right', width='350px') self.hosted_card_linker(right, disabled=disabled) center = top.contentPane(region='center') fb = center.formbuilder(cols=1, border_spacing='3px', fld_width='100%', width='350px', disabled=disabled) fb.field('code') fb.field('user_id') tc = bc.tabContainer(region='center') self.main_clienttab(tc.borderContainer(title='Info'), disabled) for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if c.startswith('hostedclient_')]: handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname, title=self.db.packages[pkgname].name_long, nodeId='hosted_client_data_%s' % pkgname, 
sqlContextName='sql_record_hosted_client_%s' % pkgname, sqlContextRoot='form.record.hosted_client_data')) def main_clienttab(self, bc, disabled): self.selectionHandler(bc.borderContainer(region='center'), label='!!Instances', datapath="instances", nodeId='instances', table='hosting.instance', struct=self.struct_instances, reloader='^form.record.id', hiddencolumns='$site_path', reload_onSaved=False, selectionPars=dict(where='$client_id=:c_id', c_id='=form.record.id', applymethod='apply_instances_selection', order_by='$code'), dialogPars=dict(height='400px', width='600px', formCb=self.instance_form, onSaved='genro.fireAfter("#instances.reload",true,5000)', toolbarPars=dict(lock_action=True, add_action=True, del_action=True, save_action=True), default_client_id='=form.record.id', saveKwargs=dict(_lockScreen=True, saveAlways=True))) def instance_form(self, parentBC, disabled=None, table=None, **kwargs): tc = parentBC.tabContainer(**kwargs) self.main_instancetab(tc.contentPane(title='Info', _class='pbl_roundedGroup', margin='5px'), table=table, disabled=disabled) for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if c.startswith('hostedinstance_')]: handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname, title=self.db.packages[pkgname].name_long, nodeId='hosted_instance_data_%s' % pkgname, sqlContextName='sql_record_hosted_instance_%s' % pkgname, sqlContextRoot='instances.dlg.record.hosted_data.%s' % pkgname)) def main_instancetab(self, parent, disabled=None, table=None): bc = parent.borderContainer() pane = bc.contentPane(region='top') pane.div('!!Manage instances', _class='pbl_roundedGroupLabel') fb = pane.formbuilder(cols=1, border_spacing='6px', dbtable=table, disabled=disabled) fb.field('code', width='15em', lbl='!!Instance Name') pane.dataRpc('.$creation_result', 'createInst', instance_code='=.code', instance_exists='=.$instance_exists', site_exists='=.$site_exists', _fired='^.$create', _onResult='FIRE .$created', 
_userChanges=True) pane.dataController(""" if (site_path){ SET .site_path=site_path; SET .$site_exists=true; } if (instance_path){ SET .path=instance_path; SET .$instance_exists=true; } """, site_path='=.$creation_result.site_path', instance_path='=.$creation_result.instance_path', _fired='^.$created', _userChanges=True) def struct(struct): r = struct.view().rows() r.cell('type', name='Slot type', width='15em') r.cell('qty', name='Qty', width='4em', dtype='I') return struct iv = self.includedViewBox(bc.borderContainer(region='center'), label='!!Slot configuration', storepath='.slot_configuration', struct=struct, datamode='bag', autoWidth=True, add_action=True, del_action=True) gridEditor = iv.gridEditor() gridEditor.dbSelect(gridcell='type', dbtable='hosting.slot_type', columns='$code,$description', rowcaption='$code', exclude=True, hasDownArrow=True) gridEditor.numberTextBox(gridcell='qty') def onLoading_hosting_instance(self, record, newrecord, loadingParameters, recInfo): tblinstance = self.db.table('hosting.instance') instance_exists = self.db.packages['hosting'].instance_exists(record['code']) site_exists = self.db.packages['hosting'].site_exists(record['code']) record.setItem('$instance_exists', instance_exists) record.setItem('$site_exists', site_exists) def rpc_apply_instances_selection(self, selection, **kwargs): tblinstance = self.db.table('hosting.instance') def apply_row(row): instance_exists = self.db.packages['hosting'].instance_exists(row['code']) site_exists = self.db.packages['hosting'].site_exists(row['code']) if site_exists and instance_exists: return dict(create='<div class="greenLight"></div>') else: return dict(create='<div class="yellowLight"></div>') selection.apply(apply_row) def rpc_createInst(self, instance_code=None, instance_exists=None, site_exists=None): result = Bag() instancetbl = self.db.table('hosting.instance') if not instance_exists: result['instance_path'] = instancetbl.create_instance(instance_code, self.site.instance_path, 
self.site.gnrapp.config) if not site_exists: result['site_path'] = instancetbl.create_site(instance_code, self.site.site_path, self.site.config) return result def struct_instances(self, struct): r = struct.view().rows() r.fieldcell('code', width='10em') r.fieldcell('path', width='20em') r.cell('create', calculated=True, name='!!Status', width='10em') return struct
lgpl-2.1
4,752,140,607,337,308,000
47.155172
117
0.546963
false
4.229682
false
false
false
hugohmk/Epidemic-Emulator
main.py
1
7208
from epidemic_emulator import node from datetime import datetime import platform import argparse import time import os import matplotlib.pyplot as plt import random def parse_network(f, node_id, topology = "clique"): neighbors = [] nd = None t = datetime.now() t = t-t net = [] index = -1 cnt = 0 for i in f: i = i.rstrip("\n").split("|") if len(i)<4: continue u = (i[0],(i[1],int(i[2])),[(i[3],t)]) if i[0]==node_id: nd = u index = cnt net.append(u) cnt+=1 f.close() # clique if topology == "clique": neighbors = [i for i in net if i[0] != node_id] # star elif topology == "star": if index > 0: neighbors = [net[0]] else: neighbors = net[1:] return neighbors,nd def simulation_controller(args,nd,network): # Example nd value: #('9', ('127.0.0.1', 9179), [('S', datetime.timedelta(0))]) # # network is a tuple containing every node identifier constructed from # args.network (default=network.txt) file r = args.recovery_rate e = args.endogenous_rate x = args.exogenous_rate if nd is not None: with node.Node(r,e,x) as a: a.start(nd, network) if args.interaction == 1: try: help_text = """>> Commands: 0 (help) -> print this 1 (print current) -> print current network state 2 (print history) -> print network history 3 (end) -> send shutdown message to all nodes 4 (display state) -> display current network state 5 (display history) -> display network history """ print help_text while True: opt = raw_input(">> Insert command: ") if opt == "0": print help_text elif opt == "1": #print a.network_state(),"\n" a.print_state() elif opt == "2": #print a.network_history(),"\n" a.print_history() elif opt == "3": a.display_history() a.network_shutdown() a.stop() break elif opt == "4": a.display_state() elif opt == "5": a.display_history() else: print "Invalid input\n" except: a.network_shutdown() a.stop() finally: a.network_shutdown() a.stop() elif args.interaction > 1: print("Running simulation for %d seconds." 
% args.interaction) time.sleep(args.interaction) #a.display_history() simdata = a.save_simulation_data() a.network_shutdown() a.stop() return simdata else: try: while not a.stopped(): time.sleep(2) except: a.stop() finally: a.stop() def process_data(simdata,repetitions,simulation_time): simresults = [[-1 for t in range(simulation_time+1)] for x in range(repetitions)] print_stuff = 1 for k in range(repetitions): if print_stuff: print("") print("Run #%d" % (k+1)) print("time\tinfected count") t = 0 for event in simdata[k]: if print_stuff: print("%.2f\t%d" % (event[0],event[1])) time = int(event[0]) infected_count = event[1] if time < t: continue elif t < simulation_time+1: if print_stuff: print("* %.2f" % event[0]) while t <= time: simresults[k][t] = infected_count t = t+1 while t < simulation_time+1: simresults[k][t] = infected_count t = t+1 if print_stuff: print("") print("Processed output:") print("time\tinfected count") for t in range(simulation_time+1): print("%d\t%d" % (t,simresults[k][t])) average_results = [0.0 for t in range(simulation_time+1)] for t in range(simulation_time+1): for k in range(repetitions): average_results[t] = average_results[t] + simresults[k][t] average_results[t] = float(average_results[t]) / repetitions print(average_results) plt.plot(list(range(0,simulation_time+1)),average_results,'-o') axes = plt.gca() axes.set_xlim([0,simulation_time]) #axes.set_ylim([0,10]) plt.xlabel("Seconds") plt.ylabel("Infected nodes") plt.savefig("average_simulation.pdf") if __name__ == "__main__": dir_path = os.path.dirname(os.path.realpath(__file__)) dir_path_unix = dir_path.replace("\\","/") if (platform.system()!="Windows"): dir_path = dir_path_unix parser = argparse.ArgumentParser() parser.add_argument("-id","--identifier",required=True, help="Node identifier") parser.add_argument("-n","--network",type=argparse.FileType('r'), default = dir_path_unix+"/network.txt", help="File that contains the network's description; each line presents 
node_id|node_ip|port_number|initial_state") # parser.add_argument("-i","--interactive",type=int,default=0, # help="Interactive mode") parser.add_argument("-i","--interaction",type=int,default=0, help="Interaction mode: default (0), interactive (1), simulation (2)") parser.add_argument("-r","--recovery_rate",type=float,#default=1.0, help="Simulation parameter: recovery_rate") parser.add_argument("-e","--endogenous_rate",type=float,#default=1.0, help="Simulation parameter: endogenous_infection_rate") parser.add_argument("-x","--exogenous_rate",type=float,#default=1e-6, help="Simulation parameter: exogenous_infection_rate") parser.add_argument("-t","--topology",choices=["clique","star"],default="clique", help="Network topology: clique or star") args = parser.parse_args() network = {} if args.network is not None: network,nd = parse_network(args.network, args.identifier, args.topology) simulation_time = args.interaction repetitions = 1 simdata = [] for i in range(repetitions): simdata.append(simulation_controller(args,nd,network)) if args.identifier == '0': process_data(simdata,repetitions,simulation_time)
mit
6,437,179,172,426,943,000
33.161137
138
0.489734
false
4.21028
false
false
false
JazzeYoung/VeryDeepAutoEncoder
theano/gpuarray/opt.py
1
39678
from __future__ import absolute_import, print_function, division import copy import numpy import logging import pdb from six.moves import xrange import theano from theano import tensor, scalar, gof, config from theano.compile import optdb from theano.compile.ops import shape_i from theano.gof import (local_optimizer, EquilibriumDB, TopoOptimizer, SequenceDB, Optimizer, toolbox) from theano.gof.optdb import LocalGroupDB from theano.ifelse import IfElse from theano.scalar.basic import Scalar, Pow, Cast from theano.scan_module import scan_utils, scan_op, scan_opt from theano.tensor.nnet.conv import ConvOp from theano.tensor.nnet.blocksparse import SparseBlockGemv, SparseBlockOuter from theano.tensor.nnet.abstract_conv import (AbstractConv2d, AbstractConv2d_gradWeights, AbstractConv2d_gradInputs) from theano.tests.breakpoint import PdbBreakpoint from .type import (GpuArrayType, GpuArrayConstant, get_context, ContextNotDefined) from .basic_ops import (as_gpuarray_variable, infer_context_name, host_from_gpu, GpuToGpu, HostFromGpu, GpuFromHost, GpuSplit, GpuContiguous, gpu_contiguous, GpuAlloc, GpuAllocEmpty, GpuReshape, GpuEye, gpu_join, GpuJoin) from .blas import (gpu_dot22, GpuGemm, GpuGer, GpuGemmBatch, gpugemm_no_inplace, gpugemm_inplace, gpugemmbatch_no_inplace, gpugemv_no_inplace, gpugemv_inplace) from .blocksparse import (GpuSparseBlockGemv, GpuSparseBlockOuter, gpu_sparse_block_outer, gpu_sparse_block_outer_inplace, gpu_sparse_block_gemv, gpu_sparse_block_gemv_inplace) from .nnet import (gpu_crossentropy_softmax_1hot_with_bias_dx, gpu_crossentropy_softmax_argmax_1hot_with_bias, gpu_softmax_with_bias, gpu_softmax) from .elemwise import (GpuElemwise, GpuDimShuffle, GpuCAReduceCuda, GpuCAReduceCPY) from .subtensor import (GpuIncSubtensor, GpuSubtensor, GpuAdvancedSubtensor1, GpuAdvancedIncSubtensor1, GpuAdvancedIncSubtensor1_dev20) from .opt_util import alpha_merge, output_merge _logger = logging.getLogger("theano.gpuarray.opt") gpu_optimizer = EquilibriumDB() 
gpu_cut_copies = EquilibriumDB() gpu_seqopt = SequenceDB() # Don't register this right now conv_groupopt = LocalGroupDB() conv_groupopt.__name__ = "gpua_conv_opts" gpu_seqopt.register('gpuarray_local_optimiziations', gpu_optimizer, 1, 'fast_compile', 'fast_run', 'gpuarray') gpu_seqopt.register('gpuarray_cut_transfers', gpu_cut_copies, 2, 'fast_compile', 'fast_run', 'gpuarray') # do not add 'fast_run' to these two as this would always enable gpuarray mode optdb.register('gpuarray_opt', gpu_seqopt, optdb.__position__.get('add_destroy_handler', 49.5) - 1, 'gpuarray') def register_opt(*tags, **kwargs): def f(local_opt): name = (kwargs and kwargs.pop('name')) or local_opt.__name__ gpu_optimizer.register(name, local_opt, 'fast_run', 'gpuarray', *tags) return local_opt return f def register_inplace(*tags, **kwargs): def f(local_opt): name = (kwargs and kwargs.pop('name')) or local_opt.__name__ optdb.register( name, TopoOptimizer( local_opt, failure_callback=TopoOptimizer.warn_inplace), 60, 'fast_run', 'inplace', 'gpuarray', *tags) return local_opt return f register_opt('fast_compile')(theano.tensor.opt.local_track_shape_i) register_opt(final_opt=True, name='gpua_constant_folding')( tensor.opt.constant_folding) gpu_optimizer.register('local_remove_all_assert', theano.tensor.opt.local_remove_all_assert, 'unsafe') def safe_to_gpu(x, ctx_name): if isinstance(x.type, tensor.TensorType): return GpuFromHost(ctx_name)(x) else: return x def safe_to_cpu(x): if isinstance(x.type, GpuArrayType): return host_from_gpu(x) else: return x def op_lifter(OP, cuda_only=False): """ OP(..., host_from_gpu(), ...) -> host_from_gpu(GpuOP(...)) gpu_from_host(OP(inp0, ...)) -> GpuOP(inp0, ...) """ def f(maker): def local_opt(node): if type(node.op) in OP: # Either one of our inputs is on the gpu or # all of our clients are on the gpu replace = False # TODO: Maybe set context_name with infer_context_name()? 
context_name = None # We replace if any input is a host_from_gpu for i in node.inputs: if i.owner and i.owner.op == host_from_gpu: context_name = i.owner.inputs[0].type.context_name replace = True break if not replace: # We replace if *all* clients are on the GPU clients = [c for o in node.outputs for c in o.clients] replace = len(clients) != 0 for c, idx in clients: if (c == 'output' or not isinstance(c.op, GpuFromHost)): replace = False # TODO: check that the clients want the same context? if replace: # All clients are GpuFromHost and we have at least one context_name = clients[0][0].op.context_name # Check if we should replace if (not replace or (cuda_only and get_context(context_name).kind != b'cuda')): return False # tag the inputs with the context in case # the context was derived from the outputs for i in node.inputs: i.tag.context_name = context_name new_op = maker(node, context_name) # This is needed as sometimes new_op inherits from OP. if new_op and new_op != node.op: if isinstance(new_op, theano.Op): return [safe_to_cpu(o) for o in new_op(*node.inputs, return_list=True)] elif isinstance(new_op, (tuple, list)): return [safe_to_cpu(o) for o in new_op] else: # suppose it is a variable on the GPU return [host_from_gpu(new_op)] return False local_opt.__name__ = maker.__name__ return local_optimizer(OP)(local_opt) return f class InputToGpuOptimizer(Optimizer): """ Transfer the input to the gpu to start the rolling wave. """ def add_requirements(self, fgraph): fgraph.attach_feature(toolbox.ReplaceValidate()) def apply(self, fgraph): for input in fgraph.inputs: if isinstance(input.type, GpuArrayType): continue # If all clients are outputs or transfers don't do anything. 
if (all(cl[0] == 'output' or isinstance(cl[0].op, GpuFromHost) for cl in input.clients)): continue target = getattr(input.tag, 'target', None) if target == 'cpu': continue try: new_input = host_from_gpu(GpuFromHost(target)(input)) fgraph.replace_validate(input, new_input, "InputToGpuOptimizer") except TypeError: # This could fail if the inputs are not TensorTypes pass except ContextNotDefined: if hasattr(input.tag, 'target'): raise # If there is no context tag and no default context # then it stays on the CPU pass gpu_seqopt.register('InputToGpuArrayOptimizer', InputToGpuOptimizer(), 0, 'fast_run', 'fast_compile', 'merge') @local_optimizer([GpuFromHost, GpuToGpu, HostFromGpu]) def local_cut_gpu_transfers(node): # gpu[ab] -> host -> gpub if (isinstance(node.op, GpuFromHost) and node.inputs[0].owner and isinstance(node.inputs[0].owner.op, HostFromGpu)): other = node.inputs[0].owner.inputs[0] if node.op.context_name == other.type.context_name: return [other] else: return [GpuToGpu(node.op.context_name)(other)] # ? -> gpua -> host elif (isinstance(node.op, HostFromGpu) and node.inputs[0].owner): n2 = node.inputs[0].owner # host -> if isinstance(n2.op, GpuFromHost): return [n2.inputs[0]] # gpub -> if isinstance(n2.op, GpuToGpu): return [host_from_gpu(n2.inputs[0])] # ? 
-> gpua -> gpub elif isinstance(node.op, GpuToGpu): # Transfer within same context if node.inputs[0].type.context_name == node.op.context_name: return [node.inputs[0]] if node.inputs[0].owner: n2 = node.inputs[0].owner # host -> if isinstance(n2.op, GpuFromHost): return [as_gpuarray_variable(n2.inputs[0], node.op.context_name)] # gpuc -> if isinstance(n2.op, GpuToGpu): if node.op.context_name == n2.inputs[0].type.context_name: return [n2.inputs[0]] else: return [node.op(n2.inputs[0])] gpu_cut_copies.register('cut_gpua_host_transfers', local_cut_gpu_transfers, 'fast_compile', 'fast_run', 'gpuarray') gpu_cut_copies.register('cut_gpua_constant_transfers', tensor.opt.constant_folding, 'fast_compile', 'fast_run', 'gpuarray') optdb['canonicalize'].register('local_cut_gpua_host_gpua', local_cut_gpu_transfers, 'fast_compile', 'fast_run', 'gpuarray') @register_opt('fast_compile') @local_optimizer([tensor.Alloc]) def local_gpuaalloc2(node): """ Join(axis, {Alloc or HostFromGPU}, ...) -> Join(axis, GpuAlloc, Alloc, ...) Moves an alloc that is an input to join to the gpu. """ try: get_context(None) except ContextNotDefined: # If there is no default context then we do not perform the move here. return if (isinstance(node.op, tensor.Alloc) and all(c != 'output' and c.op == tensor.join and all(i.owner and i.owner.op in [host_from_gpu, tensor.alloc] for i in c.inputs[1:]) for c, idx in node.outputs[0].clients)): return [host_from_gpu(GpuAlloc(None)(*node.inputs))] @register_opt('fast_compile') @op_lifter([tensor.Alloc]) def local_gpuaalloc(node, context_name): return GpuAlloc(context_name)(*node.inputs) @register_opt('fast_compile') @op_lifter([tensor.AllocEmpty]) def local_gpuaallocempty(node, context_name): # We use _props_dict() to make sure that the GPU op know all the # CPU op props. 
return GpuAllocEmpty(context_name=context_name, **node.op._props_dict())(*node.inputs) @register_opt() @local_optimizer([GpuAlloc]) def local_gpualloc_memset_0(node): if isinstance(node.op, GpuAlloc) and not node.op.memset_0: inp = node.inputs[0] if (isinstance(inp, GpuArrayConstant) and inp.data.size == 1 and (numpy.asarray(inp.data) == 0).all()): new_op = GpuAlloc(node.op.context_name, memset_0=True) return [new_op(*node.inputs)] # Don't register by default. @gof.local_optimizer([GpuAllocEmpty]) def local_gpua_alloc_empty_to_zeros(node): if isinstance(node.op, GpuAllocEmpty): context_name = infer_context_name(*node.inputs) z = numpy.asarray(0, dtype=node.outputs[0].dtype) return [GpuAlloc()(as_gpuarray_variable(z, context_name), *node.inputs)] optdb.register('local_gpua_alloc_empty_to_zeros', theano.tensor.opt.in2out(local_gpua_alloc_empty_to_zeros), # After move to gpu and merge2, before inplace. 49.3, 'alloc_empty_to_zeros',) @register_opt() @local_optimizer([GpuContiguous]) def local_gpu_contiguous_gpu_contiguous(node): """ gpu_contiguous(gpu_contiguous(x)) -> gpu_contiguous(x) """ if isinstance(node.op, GpuContiguous): inp = node.inputs[0] if inp.owner and isinstance(inp.owner.op, GpuContiguous): return [inp] @register_opt('fast_compile') @op_lifter([tensor.extra_ops.CpuContiguous]) def local_gpu_contiguous(node, context_name): return gpu_contiguous @register_opt('fast_compile') @op_lifter([tensor.Reshape]) def local_gpureshape(node, context_name): op = node.op name = op.name if name: name = 'Gpu' + name res = GpuReshape(op.ndim, op.name) return res @register_opt('fast_compile') @op_lifter([tensor.Rebroadcast]) def local_gpu_rebroadcast(node, context_name): return node.op(as_gpuarray_variable(node.inputs[0], context_name)) @register_opt('fast_compile') @op_lifter([tensor.Flatten]) def local_gpuflatten(node, context_name): op = node.op shp = [] if op.outdim != 1: shp = [node.inputs[0].shape[i] for i in range(op.outdim - 1)] shp += [-1] res = 
GpuReshape(op.outdim, None) o = res(node.inputs[0], theano.tensor.as_tensor_variable(shp)) return o @register_opt('fast_compile') @op_lifter([tensor.Elemwise]) def local_gpu_elemwise(node, context_name): op = node.op scal_op = op.scalar_op name = op.name if name: name = 'Gpu' + name if len(node.outputs) > 1: return res = GpuElemwise(scal_op, name=name, inplace_pattern=copy.copy(op.inplace_pattern), nfunc_spec=op.nfunc_spec) # If the elemwise operation is a pow, casts might be required on the # inputs and or outputs because only the (float, float)->float and # (double, double)->double cases are implemented at the moment. if isinstance(op.scalar_op, Pow): # Only transfer the computation on the gpu if the output dtype is # floating point. Else, give up on the transfer to the gpu. out_dtype = node.outputs[0].dtype if out_dtype not in ['float16', 'float32', 'float64']: return # Transfer the inputs on the GPU and cast them to the right dtype. new_inputs = [] for inp in node.inputs: if inp.dtype != out_dtype: gpu_cast_op = GpuElemwise(Cast(Scalar(out_dtype))) new_inputs.append(gpu_cast_op(as_gpuarray_variable(inp, context_name))) else: new_inputs.append(as_gpuarray_variable(inp, context_name)) # Perform the exponent on the gpu and transfer the output back to the # cpu. 
gpu_output = res(*new_inputs) cpu_output = host_from_gpu(gpu_output) return [cpu_output] else: return res def max_inputs_to_GpuElemwise(node): ptr_size = 8 int_size = 4 # we take the limit from CUDA for now argument_limit = 232 ndim = node.inputs[0].type.ndim # number of elements and shape size_param_mandatory = (int_size * (ndim + 1)) + \ (ptr_size + int_size * ndim) * len(node.outputs) nb_bytes_avail = argument_limit - size_param_mandatory nb_bytes_per_input = ptr_size + ndim * int_size max_nb_inputs = nb_bytes_avail // nb_bytes_per_input return max_nb_inputs gpu_local_elemwise_fusion = tensor.opt.local_elemwise_fusion_op( GpuElemwise, max_inputs_to_GpuElemwise) optdb.register('gpua_elemwise_fusion', tensor.opt.FusionOptimizer(gpu_local_elemwise_fusion), 71.00, 'fast_run', 'fusion', 'local_elemwise_fusion', 'gpuarray') inplace_gpu_elemwise_opt = tensor.opt.inplace_elemwise_optimizer_op( GpuElemwise) optdb.register('gpua_inplace_opt', inplace_gpu_elemwise_opt, 75, 'inplace_elemwise_optimizer', 'fast_run', 'inplace', 'gpuarray') @register_opt('fast_compile') @op_lifter([tensor.DimShuffle]) def local_gpua_dimshuffle(node, context_name): return GpuDimShuffle(node.op.input_broadcastable, node.op.new_order) @register_opt('fast_compile') @op_lifter([tensor.SpecifyShape]) def local_gpua_specifyShape(node, context_name): if isinstance(node.inputs[0].type, GpuArrayType): return inp = [as_gpuarray_variable(node.inputs[0], context_name)] inp += node.inputs[1:] return tensor.specify_shape(*inp) @register_opt('fast_compile') @op_lifter([theano.compile.ops.Shape]) def local_gpua_shape(node, context_name): # op_lifter will call this opt too frequently as the output is # always on the CPU. 
if isinstance(node.inputs[0].type, GpuArrayType): return return [as_gpuarray_variable(node.inputs[0], context_name).shape] def gpu_print_wrapper(op, cnda): op.old_op.global_fn(op.old_op, numpy.asarray(cnda)) @register_opt('fast_compile') @op_lifter([tensor.printing.Print]) def local_gpu_print_op(node, context_name): x, = node.inputs gpu_x = as_gpuarray_variable(x, context_name=context_name) new_op = node.op.__class__(global_fn=gpu_print_wrapper) new_op.old_op = node.op return new_op(gpu_x) @register_opt('fast_compile') @local_optimizer([PdbBreakpoint]) def local_gpu_pdbbreakpoint_op(node): if isinstance(node.op, PdbBreakpoint): old_inputs = node.inputs old_outputs = node.outputs new_inputs = node.inputs[:1] input_transfered = [] # Go through the monitored variables, only transfering on GPU those # for which the input comes from the GPU or the output will be # transfered on the GPU. nb_monitored_vars = len(node.outputs) for i in range(nb_monitored_vars): inp = old_inputs[i + 1] out = old_outputs[i] input_is_from_gpu = (inp.owner and isinstance(inp.owner.op, HostFromGpu)) output_goes_to_gpu = False for c in out.clients: if c == 'output': continue if isinstance(c[0].op, GpuFromHost): output_goes_to_gpu = True context_name = c[0].op.context_name break if input_is_from_gpu: # The op should be applied on the GPU version of the input new_inputs.append(inp.owner.inputs[0]) input_transfered.append(True) elif output_goes_to_gpu: # The input should be transfered to the gpu new_inputs.append(as_gpuarray_variable(inp, context_name)) input_transfered.append(True) else: # No transfer is required. 
new_inputs.append(inp) input_transfered.append(False) # Only continue the optimization if at least one input has been # transfered to the gpu if not any(input_transfered): return False # Apply the op on the new inputs new_op_outputs = node.op(*new_inputs, return_list=True) # Propagate the transfer to the gpu through the outputs that require # it new_outputs = [] for i in range(len(new_op_outputs)): if input_transfered[i]: new_outputs.append(host_from_gpu(new_op_outputs[i])) else: new_outputs.append(new_op_outputs[i]) return new_outputs return False @register_opt('fast_compile') @op_lifter([IfElse]) def local_gpua_lazy_ifelse(node, context_name): if node.op.gpu: return c = node.inputs[0] inps = [] for v in node.inputs[1:]: if isinstance(v.type, (tensor.TensorType, GpuArrayType)): inps.append(as_gpuarray_variable(v, context_name)) else: inps.append(v) return IfElse(node.op.n_outs, gpu=True)(c, *inps, return_list=True) @register_opt('fast_compile') @op_lifter([tensor.Join]) def local_gpua_join(node, context_name): return gpu_join @register_opt('fast_compile') @local_optimizer([GpuJoin]) def local_gpuajoin_1(node): # join of a single element if (isinstance(node.op, GpuJoin) and len(node.inputs) == 2): return [node.inputs[1]] @register_opt('fast_compile') @op_lifter([tensor.Split]) def local_gpua_split(node, context_name): return GpuSplit(node.op.len_splits) @register_opt('fast_compile') @op_lifter([tensor.Subtensor]) def local_gpua_subtensor(node, context_name): x = node.inputs[0] if (x.owner and isinstance(x.owner.op, HostFromGpu)): gpu_x = x.owner.inputs[0] if (gpu_x.owner and isinstance(gpu_x.owner.op, GpuFromHost) and # And it is a shared var or an input of the graph. 
not gpu_x.owner.inputs[0].owner): if len(x.clients) == 1: if any([n == 'output' or any([isinstance(v.type, GpuArrayType) for v in n.inputs + n.outputs]) for n, _ in node.outputs[0].clients]): return else: return [host_from_gpu(gpu_x.owner.op(node.outputs[0]))] return GpuSubtensor(node.op.idx_list) @register_opt('fast_compile') @op_lifter([tensor.IncSubtensor]) def local_gpua_incsubtensor(node, context_name): op = GpuIncSubtensor(node.op.idx_list, node.op.inplace, node.op.set_instead_of_inc, node.op.destroyhandler_tolerate_aliased) ret = op(*node.inputs) val = getattr(node.outputs[0].tag, 'nan_guard_mode_check', True) ret.tag.nan_guard_mode_check = val return ret @register_opt('fast_compile') @op_lifter([tensor.AdvancedSubtensor1]) def local_gpua_advanced_subtensor(node, context_name): return GpuAdvancedSubtensor1() @register_opt('fast_compile') @op_lifter([tensor.AdvancedIncSubtensor1]) def local_gpua_advanced_incsubtensor(node, context_name): context = get_context(context_name) # This is disabled on non-cuda contexts if context.kind != b'cuda': return None x, y, ilist = node.inputs # Gpu Ops needs both inputs to have the same dtype if (x.type.dtype != y.type.dtype): dtype = scalar.upcast(x.type.dtype, y.type.dtype) if x.type.dtype != dtype: x = tensor.cast(x, dtype) if y.type.dtype != dtype: y = tensor.cast(y, dtype) set_instead_of_inc = node.op.set_instead_of_inc compute_capability = int(context.bin_id[-2]) if (compute_capability < 2 or x.ndim != 2 or y.ndim != 2): return GpuAdvancedIncSubtensor1( set_instead_of_inc=set_instead_of_inc) else: return GpuAdvancedIncSubtensor1_dev20( set_instead_of_inc=set_instead_of_inc) @register_inplace() @local_optimizer([GpuAdvancedIncSubtensor1, GpuAdvancedIncSubtensor1_dev20]) def local_advincsub1_gpua_inplace(node): if isinstance(node.op, (GpuAdvancedIncSubtensor1, GpuAdvancedIncSubtensor1_dev20)): if not node.op.inplace: return [node.op.clone_inplace()(*node.inputs)] @register_opt('fast_compile') @op_lifter([tensor.CAReduce, 
tensor.Sum, tensor.elemwise.Prod]) def local_gpua_careduce(node, context_name): if isinstance(node.op.scalar_op, (scalar.Add, scalar.Mul, scalar.Maximum, scalar.Minimum)): ctx = get_context(context_name) if ctx.kind == b'opencl': op = GpuCAReduceCPY if node.op.scalar_op not in [scalar.add, scalar.mul]: # We don't support yet all reduction with cpy code. return elif ctx.kind == b'cuda': op = GpuCAReduceCuda else: return False x, = node.inputs greduce = op( node.op.scalar_op, axis=node.op.axis, dtype=getattr(node.op, 'dtype', None), acc_dtype=getattr(node.op, 'acc_dtype', None)) gvar = greduce(x) # We need to have the make node called, otherwise the mask can # be None if (op is GpuCAReduceCPY or gvar.owner.op.supports_c_code([ as_gpuarray_variable(x, context_name)])): return greduce else: # Try to make a simpler pattern based on reshaping # The principle is that if two adjacent dimensions have # the same value in the reduce_mask, then we can reshape # to make them a single dimension, do the reduction, and # then reshape to get them back. 
if node.op.axis is None: reduce_mask = [1] * x.type.ndim else: reduce_mask = [0] * x.type.ndim for a in node.op.axis: assert reduce_mask[a] == 0 reduce_mask[a] = 1 new_in_shp = [shape_i(x, 0)] new_mask = [reduce_mask[0]] for i in xrange(1, x.type.ndim): if reduce_mask[i] == reduce_mask[i - 1]: new_in_shp[-1] *= shape_i(x, i) else: new_mask.append(reduce_mask[i]) new_in_shp.append(shape_i(x, i)) new_axis = [] for idx, m in enumerate(new_mask): if m == 1: new_axis.append(idx) greduce = op( node.op.scalar_op, axis=new_axis, reduce_mask=new_mask, dtype=getattr(node.op, 'dtype', None), acc_dtype=getattr(node.op, 'acc_dtype', None)) reshaped_x = x.reshape(tensor.stack(new_in_shp)) gpu_reshaped_x = as_gpuarray_variable(reshaped_x, context_name) gvar = greduce(gpu_reshaped_x) # We need to have the make node called, otherwise the mask can # be None reshaped_gpu_inputs = [gpu_reshaped_x] if greduce.supports_c_code(reshaped_gpu_inputs): reduce_reshaped_x = host_from_gpu( greduce(gpu_reshaped_x)) if reduce_reshaped_x.ndim != node.outputs[0].ndim: out_shp = [] for i in range(x.ndim): if i not in node.op.axis: out_shp.append(shape_i(x, i)) unreshaped_reduce = reduce_reshaped_x.reshape( tensor.stack(out_shp)) else: unreshaped_reduce = reduce_reshaped_x return [unreshaped_reduce] @register_opt('fast_compile') @op_lifter([tensor.blas.Gemv, tensor.blas_c.CGemv]) def local_gpua_gemv(node, context_name): if node.op.inplace: return gpugemv_inplace else: return gpugemv_no_inplace @register_opt('fast_compile') @op_lifter([tensor.blas.Gemm]) def local_gpua_gemm(node, context_name): if node.op.inplace: return gpugemm_inplace else: return gpugemm_no_inplace @register_opt('fast_compile') @op_lifter([tensor.blas.BatchedDot]) def local_gpua_gemmbatch(node, context_name): a, b = node.inputs c = tensor.AllocEmpty(a.dtype)(a.shape[0], a.shape[1], b.shape[2]) return gpugemmbatch_no_inplace(c, 1.0, a, b, 0.0) @register_opt('fast_compile') @op_lifter([tensor.basic.Dot]) def local_gpua_hgemm(node, 
context_name): from theano.sandbox.cuda import nvcc_compiler if nvcc_compiler.nvcc_version < '7.5': _logger.warning("Not performing dot of float16 on the GPU since " "cuda 7.5 is not available. Updating could speed up " "your code.") return A = node.inputs[0] B = node.inputs[1] if (A.ndim == 2 and B.ndim == 2 and A.dtype == 'float16' and B.dtype == 'float16'): fgraph = node.inputs[0].fgraph C = GpuAllocEmpty(dtype='float16', context_name=context_name)( shape_i(A, 0, fgraph), shape_i(B, 1, fgraph)) return gpugemm_no_inplace(C, 1.0, A, B, 0.0) @register_opt() @alpha_merge(GpuGemm, alpha_in=1, beta_in=4) def local_gpuagemm_alpha_merge(node, *inputs): return [gpugemm_no_inplace(*inputs)] @register_opt() @output_merge(GpuGemm, alpha_in=1, beta_in=4, out_in=0) def local_gpuagemm_output_merge(node, *inputs): return [gpugemm_no_inplace(*inputs)] @register_opt() @alpha_merge(GpuGemmBatch, alpha_in=1, beta_in=4) def local_gpuagemmbatch_alpha_merge(node, *inputs): return [gpugemmbatch_no_inplace(*inputs)] @register_opt() @output_merge(GpuGemmBatch, alpha_in=1, beta_in=4, out_in=0) def local_gpuagemmbatch_output_merge(node, *inputs): return [gpugemmbatch_no_inplace(*inputs)] @register_opt('fast_compile') @op_lifter([tensor.blas.Ger, tensor.blas_c.CGer, tensor.blas_scipy.ScipyGer]) def local_gpua_ger(node, context_name): return GpuGer(inplace=node.op.destructive) @register_opt('fast_compile') @op_lifter([tensor.blas.Dot22]) def local_gpua_dot22(node, context_name): return gpu_dot22 @register_opt('fast_compile') @op_lifter([tensor.blas.Dot22Scalar]) def local_gpua_dot22scalar(node, context_name): x, y, a = node.inputs x = as_gpuarray_variable(x, context_name) y = as_gpuarray_variable(y, context_name) z = GpuAllocEmpty(x.dtype, context_name)(x.shape[0], y.shape[1]) return [gpugemm_no_inplace(z, a, x, y, 0)] @register_opt('fast_compile') @op_lifter([tensor.basic.Eye]) def local_gpua_eye(node, context_name): return GpuEye(dtype=node.op.dtype, context_name=context_name) 
@register_opt('fast_compile') @op_lifter([tensor.nnet.CrossentropySoftmaxArgmax1HotWithBias], cuda_only=True) def local_gpua_crossentropysoftmaxargmax1hotwithbias(node, context_name): return gpu_crossentropy_softmax_argmax_1hot_with_bias @register_opt('fast_compile') @op_lifter([tensor.nnet.CrossentropySoftmax1HotWithBiasDx], cuda_only=True) def local_gpua_crossentropysoftmax1hotwithbiasdx(node, context_name): return gpu_crossentropy_softmax_1hot_with_bias_dx @register_opt('fast_compile') @op_lifter([tensor.nnet.Softmax], cuda_only=True) def local_gpua_softmax(node, context_name): return gpu_softmax @register_opt('fast_compile') @op_lifter([tensor.nnet.SoftmaxWithBias], cuda_only=True) def local_gpua_softmaxwithbias(node, context_name): return gpu_softmax_with_bias @register_opt('fast_compile') @op_lifter([theano.tensor.opt.Assert]) def local_assert(node, context_name): # Check if input nodes are already on the GPU if isinstance(node.inputs[0].type, GpuArrayType): return return [host_from_gpu(node.op(as_gpuarray_variable(node.inputs[0], context_name), *node.inputs[1:]))] @register_opt('fast_compile') @op_lifter([ConvOp]) def local_error_convop(node, context_name): assert False, """ ConvOp does not work with the gpuarray backend. 
Use the new convolution interface to have GPU convolution working: theano.tensor.nnet.conv2d() """ @register_opt('fast_compile') @op_lifter([SparseBlockGemv]) def local_lift_sparseblockgemv(node, context_name): if node.op.inplace: return gpu_sparse_block_gemv_inplace else: return gpu_sparse_block_gemv @register_opt('fast_compile') @op_lifter([SparseBlockOuter]) def local_lift_sparseblockouter(node, context_name): if node.op.inplace: return gpu_sparse_block_outer_inplace else: return gpu_sparse_block_outer @register_inplace() @local_optimizer([GpuSparseBlockGemv], inplace=True) def local_inplace_sparseblockgemv(node): if isinstance(node.op, GpuSparseBlockGemv) and not node.op.inplace: return [gpu_sparse_block_gemv_inplace(*node.inputs)] @register_inplace() @local_optimizer([GpuSparseBlockOuter], inplace=True) def local_inplace_sparseblockouter(node): if isinstance(node.op, GpuSparseBlockOuter) and not node.op.inplace: return [GpuSparseBlockOuter(inplace=True)(*node.inputs)] # This deals with any abstract convs that have a transfer somewhere @register_opt('fast_compile') @op_lifter([AbstractConv2d, AbstractConv2d_gradWeights, AbstractConv2d_gradInputs]) def local_lift_abstractconv2d(node, context_name): if isinstance(node.outputs[0].type, GpuArrayType): # Don't handle this node here, it's already on the GPU. return inps = list(node.inputs) inps[0] = as_gpuarray_variable(node.inputs[0], context_name=context_name) inps[1] = as_gpuarray_variable(node.inputs[1], context_name=context_name) return [node.op(*inps)] # Register this here so that it goes after the abstract lifting register_opt('fast_compile')(conv_groupopt) @register_opt("low_memory") @local_optimizer([GpuCAReduceCuda]) def local_gpu_elemwise_careduce(node): """ Merge some GpuCAReduceCuda and GPUElemwise. """ if (isinstance(node.op, GpuCAReduceCuda) and node.op.pre_scalar_op is None and node.inputs[0].owner and isinstance(node.inputs[0].owner.op, GpuElemwise) and # The Op support all scalar with 1 inputs. 
We don't # automatically add more case, as some like trigonometic # operation with some reduction pattern will probably results # in slow down. isinstance(node.inputs[0].owner.op.scalar_op, scalar.basic.Sqr)): op = node.op inp = node.inputs[0].owner.inputs[0] return [GpuCAReduceCuda(scalar_op=op.scalar_op, axis=op.axis, reduce_mask=op.reduce_mask, pre_scalar_op=scalar.basic.sqr)(inp)] @local_optimizer(None) def local_assert_no_cpu_op(node): if (all([var.owner and isinstance(var.owner.op, HostFromGpu) for var in node.inputs]) and any([[c for c in var.clients if isinstance(c[0].op, GpuFromHost)] for var in node.outputs])): if config.assert_no_cpu_op == "warn": _logger.warning(("CPU Op %s is detected in the computation " "graph") % node) elif config.assert_no_cpu_op == "raise": raise AssertionError("The Op %s is on CPU." % node) elif config.assert_no_cpu_op == "pdb": pdb.set_trace() # Register the local_assert_no_cpu_op: assert_no_cpu_op = theano.tensor.opt.in2out(local_assert_no_cpu_op, name='assert_no_cpu_op') # 49.2 is after device specialization & fusion optimizations for last transfers optdb.register('gpua_assert_no_cpu_op', assert_no_cpu_op, 49.2, 'assert_no_cpu_op') def tensor_to_gpu(x, context_name): if isinstance(x.type, tensor.TensorType): y = GpuArrayType(broadcastable=x.type.broadcastable, context_name=context_name, dtype=x.type.dtype)() if x.name: y.name = x.name + '[Gpua]' return y else: return x def gpu_safe_new(x, tag=''): """ Internal function that constructs a new variable from x with the same type, but with a different name (old name + tag). This function is used by gradient, or the R-op to construct new variables for the inputs of the inner graph such that there is no interference between the original graph and the newly constructed graph. 
""" if hasattr(x, 'name') and x.name is not None: nw_name = x.name + tag else: nw_name = None if isinstance(x, theano.Constant): return x.clone() nw_x = x.type() nw_x.name = nw_name return nw_x def gpu_reconstruct_graph(inputs, outputs, tag=None): """ Different interface to clone, that allows you to pass inputs. Compared to clone, this method always replaces the inputs with new variables of the same type, and returns those (in the same order as the original inputs). """ if tag is None: tag = '' nw_inputs = [gpu_safe_new(x, tag) for x in inputs] givens = {} for nw_x, x in zip(nw_inputs, inputs): givens[x] = nw_x nw_outputs = scan_utils.clone(outputs, replace=givens) return (nw_inputs, nw_outputs) @register_opt('scan', 'fast_compile') @op_lifter([scan_op.Scan]) def local_scan_to_gpua(node, context_name): info = copy.deepcopy(node.op.info) if info.get('gpua', False): return info['gpua'] = True nw_ins = [node.inputs[0]] e = (1 + node.op.n_seqs + node.op.n_mit_mot + node.op.n_mit_sot + node.op.n_sit_sot + node.op.n_shared_outs) nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[1:e]] b = e e = e + node.op.n_nit_sot nw_ins += node.inputs[b:e] nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[e:]] scan_ins = [tensor_to_gpu(x, context_name) for x in node.op.inputs] # The inner output corresponding to the looping condition should not be # moved to the gpu if node.op.info['as_while']: scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs[:-1]] scan_outs += [node.op.outputs[-1]] else: scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs] scan_outs = scan_utils.clone( scan_outs, replace=list(zip(node.op.inputs, (safe_to_cpu(x) for x in scan_ins)))) # We need to construct the hash here, because scan # __init__ does not know about the gpu and can not # handle graphs with inputs being on the gpu tmp_in, tmp_out = gpu_reconstruct_graph(scan_ins, scan_outs) local_fgraph = gof.FunctionGraph(tmp_in, tmp_out, clone=True) _cmodule_key = 
gof.CLinker().cmodule_key_(local_fgraph, []) info['gpu_hash'] = hash(_cmodule_key) def typebuild(dtype, broadcastable, context_name=context_name): return GpuArrayType(dtype=dtype, broadcastable=broadcastable, context_name=context_name) nw_op = scan_op.Scan(scan_ins, scan_outs, info, typeConstructor=typebuild).make_node(*nw_ins) return nw_op.outputs def _scan_type_infer(node): context_name = infer_context_name(*node.inputs) def typebuild(dtype, broadcastable, context_name=context_name): return GpuArrayType(dtype=dtype, broadcastable=broadcastable, context_name=context_name) return typebuild # Do not register in fast_run or fast_compile. # It will be added to fast_run if the GPU is enabled. optdb.register('gpua_scanOp_make_inplace', scan_opt.ScanInplaceOptimizer(typeInfer=_scan_type_infer, gpua_flag=True), 75, 'gpuarray', 'inplace', 'scan')
bsd-3-clause
-5,803,320,694,378,629,000
34.113274
87
0.599526
false
3.479
false
false
false
subins2000/TorrentBro
torrentbro/lib/tpb/constants.py
1
3066
import sys if sys.version_info >= (3, 0): class_type = type else: from new import classobj class_type = classobj class ConstantType(type): """ Tree representation metaclass for class attributes. Metaclass is extended to all child classes too. """ def __new__(cls, clsname, bases, dct): """ Extend metaclass to all class attributes too. """ attrs = {} for name, attr in dct.items(): if isinstance(attr, class_type): # substitute attr with a new class with Constants as # metaclass making it possible to spread this same method # to all child classes attr = ConstantType( attr.__name__, attr.__bases__, attr.__dict__) attrs[name] = attr return super(ConstantType, cls).__new__(cls, clsname, bases, attrs) def __repr__(cls): """ Tree representation of class attributes. Child classes are also represented. """ # dump current class name tree = cls.__name__ + ':\n' for name in dir(cls): if not name.startswith('_'): attr = getattr(cls, name) output = repr(attr) if not isinstance(attr, ConstantType): output = '{}: {}'.format(name, output) # indent all child attrs tree += '\n'.join([' ' * 4 + line for line in output.splitlines()]) + '\n' return tree def __str__(cls): return repr(cls) Constants = ConstantType('Constants', (object,), {}) class ORDERS(Constants): class NAME: DES = 1 ASC = 2 class UPLOADED: DES = 3 ASC = 4 class SIZE: DES = 5 ASC = 6 class SEEDERS: DES = 7 ASC = 8 class LEECHERS: DES = 9 ASC = 10 class UPLOADER: DES = 11 ASC = 12 class TYPE: DES = 13 ASC = 14 class CATEGORIES(Constants): ALL = 0 class AUDIO: ALL = 100 MUSIC = 101 AUDIO_BOOKS = 102 SOUND_CLIPS = 103 FLAC = 104 OTHER = 199 class VIDEO: ALL = 200 MOVIES = 201 MOVIES_DVDR = 202 MUSIC_VIDEOS = 203 MOVIE_CLIPS = 204 TV_SHOWS = 205 HANDHELD = 206 HD_MOVIES = 207 HD_TV_SHOWS = 208 THREE_DIMENSIONS = 209 OTHER = 299 class APPLICATIONS: ALL = 300 WINDOWS = 301 MAC = 302 UNIX = 303 HANDHELD = 304 IOS = 305 ANDROID = 306 OTHER = 399 class GAMES: ALL = 400 PC = 401 MAC = 402 PSX = 403 XBOX360 = 404 WII = 405 HANDHELD 
= 406 IOS = 407 ANDROID = 408 OTHER = 499 class OTHER: EBOOKS = 601 COMICS = 602 PICTURES = 603 COVERS = 604 PHYSIBLES = 605 OTHER = 699
gpl-3.0
-6,296,941,637,029,682,000
21.217391
80
0.483366
false
4.093458
false
false
false
wikimedia/user_metrics
user_metrics/api/run.py
1
4196
#!/usr/bin/python # -*- coding: utf-8 -*- """ This module defines the entry point for flask_ web server implementation of the Wikimedia User Metrics API. This module is consumable by the Apache web server via WSGI interface via mod_wsgi. An Apache server can be pointed to api.wsgi such that Apache may be used as a wrapper in this way. .. _flask: http://flask.pocoo.org Cohort Data ^^^^^^^^^^^ Cohort data is maintained in the host s1-analytics-slave.eqiad.wmnet under the `staging` database in the `usertags` and `usertags_meta` tables: :: +---------+-----------------+------+-----+---------+-------+ | Field | Type | Null | Key | Default | Extra | +---------+-----------------+------+-----+---------+-------+ | ut_user | int(5) unsigned | NO | PRI | NULL | | | ut_tag | int(4) unsigned | NO | PRI | NULL | | +---------+-----------------+------+-----+---------+-------+ +-------------+-----------------+------+-----+---------+ | Field | Type | Null | Key | Default | +-------------+-----------------+------+-----+---------+ | utm_id | int(5) unsigned | NO | PRI | NULL | | utm_name | varchar(255) | NO | | | | utm_notes | varchar(255) | YES | | NULL | | utm_touched | datetime | YES | | NULL | +-------------+-----------------+------+-----+---------+ """ __author__ = { "dario taraborelli": "[email protected]", "ryan faulkner": "[email protected]" } __date__ = "2012-12-21" __license__ = "GPL (version 2 or later)" import multiprocessing as mp from user_metrics.config import logging, settings from user_metrics.api.engine.request_manager import job_control, \ requests_notification_callback from user_metrics.api.engine.response_handler import process_responses from user_metrics.api.views import app from user_metrics.api.engine.request_manager import api_request_queue, \ req_notification_queue_out, req_notification_queue_in, api_response_queue from user_metrics.utils import terminate_process_with_checks job_controller_proc = None response_controller_proc = None rm_callback_proc = None 
###### # # Define Custom Classes # ####### def teardown(): """ When the instance is deleted store the pickled data and shutdown the job controller """ # Try to shutdown the job control proc gracefully try: terminate_process_with_checks(job_controller_proc) terminate_process_with_checks(response_controller_proc) terminate_process_with_checks(rm_callback_proc) except Exception: logging.error(__name__ + ' :: Could not shut down callbacks.') def setup_controller(req_queue, res_queue, msg_queue_in, msg_queue_out): """ Sets up the process that handles API jobs """ job_controller_proc = mp.Process(target=job_control, args=(req_queue, res_queue)) response_controller_proc = mp.Process(target=process_responses, args=(res_queue, msg_queue_in)) rm_callback_proc = mp.Process(target=requests_notification_callback, args=(msg_queue_in, msg_queue_out)) job_controller_proc.start() response_controller_proc.start() rm_callback_proc.start() ###### # # Execution # ####### # initialize API data - get the instance setup_controller(api_request_queue, api_response_queue, req_notification_queue_in, req_notification_queue_out) app.config['SECRET_KEY'] = settings.__secret_key__ # With the presence of flask.ext.login module if settings.__flask_login_exists__: from user_metrics.api.session import login_manager login_manager.setup_app(app) if __name__ == '__main__': try: app.run(debug=True, use_reloader=False, host=settings.__instance_host__, port=settings.__instance_port__,) finally: teardown()
bsd-3-clause
5,606,985,563,411,942,000
32.568
78
0.547188
false
3.958491
false
false
false
benosteen/mypaint
gui/brushcreationwidget.py
1
9333
# This file is part of MyPaint. # Copyright (C) 2009 by Martin Renold <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. import os import gtk gdk = gtk.gdk from lib import document import tileddrawwidget, brushmanager, dialogs from gettext import gettext as _ def startfile(path): import os import platform if platform.system == 'Windows': os.startfile(path) else: os.system("xdg-open " + path) def stock_button(stock_id): b = gtk.Button() img = gtk.Image() img.set_from_stock(stock_id, gtk.ICON_SIZE_MENU) b.add(img) return b class BrushManipulationWidget(gtk.HBox): """ """ def __init__(self, app, brushicon_editor): gtk.HBox.__init__(self) self.app = app self.bm = app.brushmanager self.brushicon_editor = brushicon_editor self.init_widgets() self.bm.selected_brush_observers.append(self.brush_selected_cb) def init_widgets(self): l = self.brush_name_label = gtk.Label() l.set_text(_('(unnamed brush)')) self.pack_start(l, expand=True) right_vbox_buttons = [ (gtk.STOCK_SAVE, self.update_settings_cb, _('Save Settings')), (gtk.STOCK_ADD, self.create_brush_cb, _('Add As New')), (gtk.STOCK_PROPERTIES, self.edit_brush_cb, _('Edit Brush Icon')), (gtk.STOCK_EDIT, self.rename_brush_cb, _('Rename...')), (gtk.STOCK_DELETE, self.delete_brush_cb, _('Remove...')), ] for stock_id, clicked_cb, tooltip in reversed(right_vbox_buttons): b = stock_button(stock_id) b.connect('clicked', clicked_cb) b.set_tooltip_text(tooltip) self.pack_end(b, expand=False) def brush_selected_cb(self, managed_brush, brushinfo): name = managed_brush.name if name is None: name = _('(unnamed brush)') else: name = name.replace('_', ' ') # XXX safename/unsafename utils? 
self.brush_name_label.set_text(name) def edit_brush_cb(self, window): self.edit_brush_properties_cb() def create_brush_cb(self, window): """Create and save a new brush based on the current working brush.""" b = brushmanager.ManagedBrush(self.bm) b.brushinfo = self.app.brush.clone() b.brushinfo.set_string_property("parent_brush_name", None) #avoid mis-hilight b.preview = self.brushicon_editor.get_preview_pixbuf() b.save() if self.bm.active_groups: group = self.bm.active_groups[0] else: group = brushmanager.DEFAULT_BRUSH_GROUP brushes = self.bm.get_group_brushes(group, make_active=True) brushes.insert(0, b) b.persistent = True # Brush was saved b.in_brushlist = True for f in self.bm.brushes_observers: f(brushes) self.bm.select_brush(b) # Pretend that the active app.brush is a child of the new one, for the # sake of the strokemap and strokes drawn immediately after. self.app.brush.set_string_property("parent_brush_name", b.name) def rename_brush_cb(self, window): src_brush = self.bm.selected_brush if not src_brush.name: dialogs.error(self, _('No brush selected!')) return dst_name = dialogs.ask_for_name(self, _("Rename Brush"), src_brush.name.replace('_', ' ')) if not dst_name: return dst_name = dst_name.replace(' ', '_') # ensure we don't overwrite an existing brush by accident dst_deleted = None for group, brushes in self.bm.groups.iteritems(): for b2 in brushes: if b2.name == dst_name: if group == brushmanager.DELETED_BRUSH_GROUP: dst_deleted = b2 else: dialogs.error(self, _('A brush with this name already exists!')) return print 'renaming brush', repr(src_brush.name), '-->', repr(dst_name) if dst_deleted: deleted_brushes = self.bm.get_group_brushes(brushmanager.DELETED_BRUSH_GROUP) deleted_brushes.remove(dst_deleted) for f in self.bm.brushes_observers: f(deleted_brushes) # save src as dst src_name = src_brush.name src_brush.name = dst_name src_brush.save() src_brush.name = src_name # load dst dst_brush = brushmanager.ManagedBrush(self.bm, dst_name, persistent=True) 
dst_brush.load() dst_brush.in_brushlist = True # replace src with dst (but keep src in the deleted list if it is a stock brush) self.delete_brush_internal(src_brush, replacement=dst_brush) self.bm.select_brush(dst_brush) def update_settings_cb(self, window): b = self.bm.selected_brush if not b.name: dialogs.error(self, _('No brush selected, please use "Add As New" instead.')) return b.brushinfo = self.app.brush.clone() b.save() def delete_brush_cb(self, window): b = self.bm.selected_brush if not b.name: dialogs.error(self, _('No brush selected!')) return if not dialogs.confirm(self, _("Really delete brush from disk?")): return self.bm.select_brush(None) self.delete_brush_internal(b) def delete_brush_internal(self, b, replacement=None): for brushes in self.bm.groups.itervalues(): if b in brushes: idx = brushes.index(b) if replacement: brushes[idx] = replacement else: del brushes[idx] for f in self.bm.brushes_observers: f(brushes) assert b not in brushes, 'Brush exists multiple times in the same group!' 
if not b.delete_from_disk(): # stock brush can't be deleted deleted_brushes = self.bm.get_group_brushes(brushmanager.DELETED_BRUSH_GROUP) deleted_brushes.insert(0, b) for f in self.bm.brushes_observers: f(deleted_brushes) class BrushIconEditorWidget(gtk.VBox): def __init__(self, app): gtk.VBox.__init__(self) self.app = app self.bm = app.brushmanager self.set_border_width(8) self.init_widgets() self.bm.selected_brush_observers.append(self.brush_selected_cb) self.set_brush_preview_edit_mode(False) def init_widgets(self): button_box = gtk.HBox() doc = document.Document(self.app.brush) self.tdw = tileddrawwidget.TiledDrawWidget(self.app, doc) self.tdw.set_size_request(brushmanager.preview_w*2, brushmanager.preview_h*2) self.tdw.scale = 2.0 tdw_box = gtk.HBox() tdw_box.pack_start(self.tdw, expand=False, fill=False) tdw_box.pack_start(gtk.Label(), expand=True) self.pack_start(tdw_box, expand=False, fill=False, padding=3) self.pack_start(button_box, expand=False, fill=False, padding=3) self.brush_preview_edit_mode_button = b = gtk.CheckButton(_('Edit')) b.connect('toggled', self.brush_preview_edit_mode_cb) button_box.pack_start(b, expand=False, padding=3) self.brush_preview_clear_button = b = gtk.Button(_('Clear')) def clear_cb(window): self.tdw.doc.clear_layer() b.connect('clicked', clear_cb) button_box.pack_start(b, expand=False, padding=3) self.brush_preview_save_button = b = gtk.Button(_('Save')) b.connect('clicked', self.update_preview_cb) button_box.pack_start(b, expand=False, padding=3) def brush_preview_edit_mode_cb(self, button): self.set_brush_preview_edit_mode(button.get_active()) def set_brush_preview_edit_mode(self, edit_mode): self.brush_preview_edit_mode = edit_mode self.brush_preview_edit_mode_button.set_active(edit_mode) self.brush_preview_save_button.set_sensitive(edit_mode) self.brush_preview_clear_button.set_sensitive(edit_mode) self.tdw.set_sensitive(edit_mode) def set_preview_pixbuf(self, pixbuf): if pixbuf is None: self.tdw.doc.clear() else: 
self.tdw.doc.load_from_pixbuf(pixbuf) def get_preview_pixbuf(self): pixbuf = self.tdw.doc.render_as_pixbuf(0, 0, brushmanager.preview_w, brushmanager.preview_h) return pixbuf def update_preview_cb(self, window): pixbuf = self.get_preview_pixbuf() b = self.bm.selected_brush if not b.name: dialogs.error(self, _('No brush selected, please use "Add As New" instead.')) return b.preview = pixbuf b.save() for brushes in self.bm.groups.itervalues(): if b in brushes: for f in self.bm.brushes_observers: f(brushes) def brush_selected_cb(self, managed_brush, brushinfo): # Update brush icon preview if it is not in edit mode if not self.brush_preview_edit_mode: self.set_preview_pixbuf(managed_brush.preview)
gpl-2.0
4,452,946,291,741,899,300
35.457031
100
0.605807
false
3.571757
false
false
false
dietrichc/streamline-ppc-reports
examples/dfp/v201405/label_service/get_labels_by_statement.py
1
1743
#!/usr/bin/python # # Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This code example gets all labels ordered by name. To create a label, run create_label.py. This feature is only available to DFP premium solution networks. """ __author__ = ('Nicholas Chen', 'Joseph DiLallo') # Import appropriate modules from the client library. from googleads import dfp def main(client): # Initialize appropriate service. label_service = client.GetService('LabelService', version='v201405') # Create statement to get all labels statement = dfp.FilterStatement('ORDER BY name') # Get labels by statement. while True: response = label_service.getLabelsByStatement(statement.ToStatement()) if 'results' in response: # Display results. for label in response['results']: print ('Label with id \'%s\' and name \'%s\' was found.' % (label['id'], label['name'])) statement.offset += dfp.SUGGESTED_PAGE_LIMIT else: break print '\nNumber of results found: %s' % response['totalResultSetSize'] if __name__ == '__main__': # Initialize client object. dfp_client = dfp.DfpClient.LoadFromStorage() main(dfp_client)
apache-2.0
-264,873,779,856,759,300
30.690909
77
0.703385
false
3.908072
false
false
false
ryfeus/lambda-packs
Pandas_numpy/source/numpy/core/_internal.py
3
21639
""" A place for code to be called from core C-code. Some things are more easily handled Python. """ from __future__ import division, absolute_import, print_function import re import sys from numpy.compat import basestring from .multiarray import dtype, array, ndarray try: import ctypes except ImportError: ctypes = None from .numerictypes import object_ if (sys.byteorder == 'little'): _nbo = b'<' else: _nbo = b'>' def _makenames_list(adict, align): allfields = [] fnames = list(adict.keys()) for fname in fnames: obj = adict[fname] n = len(obj) if not isinstance(obj, tuple) or n not in [2, 3]: raise ValueError("entry not a 2- or 3- tuple") if (n > 2) and (obj[2] == fname): continue num = int(obj[1]) if (num < 0): raise ValueError("invalid offset.") format = dtype(obj[0], align=align) if (n > 2): title = obj[2] else: title = None allfields.append((fname, format, num, title)) # sort by offsets allfields.sort(key=lambda x: x[2]) names = [x[0] for x in allfields] formats = [x[1] for x in allfields] offsets = [x[2] for x in allfields] titles = [x[3] for x in allfields] return names, formats, offsets, titles # Called in PyArray_DescrConverter function when # a dictionary without "names" and "formats" # fields is used as a data-type descriptor. 
def _usefields(adict, align): try: names = adict[-1] except KeyError: names = None if names is None: names, formats, offsets, titles = _makenames_list(adict, align) else: formats = [] offsets = [] titles = [] for name in names: res = adict[name] formats.append(res[0]) offsets.append(res[1]) if (len(res) > 2): titles.append(res[2]) else: titles.append(None) return dtype({"names": names, "formats": formats, "offsets": offsets, "titles": titles}, align) # construct an array_protocol descriptor list # from the fields attribute of a descriptor # This calls itself recursively but should eventually hit # a descriptor that has no fields and then return # a simple typestring def _array_descr(descriptor): fields = descriptor.fields if fields is None: subdtype = descriptor.subdtype if subdtype is None: if descriptor.metadata is None: return descriptor.str else: new = descriptor.metadata.copy() if new: return (descriptor.str, new) else: return descriptor.str else: return (_array_descr(subdtype[0]), subdtype[1]) names = descriptor.names ordered_fields = [fields[x] + (x,) for x in names] result = [] offset = 0 for field in ordered_fields: if field[1] > offset: num = field[1] - offset result.append(('', '|V%d' % num)) offset += num if len(field) > 3: name = (field[2], field[3]) else: name = field[2] if field[0].subdtype: tup = (name, _array_descr(field[0].subdtype[0]), field[0].subdtype[1]) else: tup = (name, _array_descr(field[0])) offset += field[0].itemsize result.append(tup) if descriptor.itemsize > offset: num = descriptor.itemsize - offset result.append(('', '|V%d' % num)) return result # Build a new array from the information in a pickle. # Note that the name numpy.core._internal._reconstruct is embedded in # pickles of ndarrays made with NumPy before release 1.0 # so don't remove the name here, or you'll # break backward compatibility. def _reconstruct(subtype, shape, dtype): return ndarray.__new__(subtype, shape, dtype) # format_re was originally from numarray by J. 
Todd Miller format_re = re.compile(br'(?P<order1>[<>|=]?)' br'(?P<repeats> *[(]?[ ,0-9L]*[)]? *)' br'(?P<order2>[<>|=]?)' br'(?P<dtype>[A-Za-z0-9.?]*(?:\[[a-zA-Z0-9,.]+\])?)') sep_re = re.compile(br'\s*,\s*') space_re = re.compile(br'\s+$') # astr is a string (perhaps comma separated) _convorder = {b'=': _nbo} def _commastring(astr): startindex = 0 result = [] while startindex < len(astr): mo = format_re.match(astr, pos=startindex) try: (order1, repeats, order2, dtype) = mo.groups() except (TypeError, AttributeError): raise ValueError('format number %d of "%s" is not recognized' % (len(result)+1, astr)) startindex = mo.end() # Separator or ending padding if startindex < len(astr): if space_re.match(astr, pos=startindex): startindex = len(astr) else: mo = sep_re.match(astr, pos=startindex) if not mo: raise ValueError( 'format number %d of "%s" is not recognized' % (len(result)+1, astr)) startindex = mo.end() if order2 == b'': order = order1 elif order1 == b'': order = order2 else: order1 = _convorder.get(order1, order1) order2 = _convorder.get(order2, order2) if (order1 != order2): raise ValueError( 'inconsistent byte-order specification %s and %s' % (order1, order2)) order = order1 if order in [b'|', b'=', _nbo]: order = b'' dtype = order + dtype if (repeats == b''): newitem = dtype else: newitem = (dtype, eval(repeats)) result.append(newitem) return result class dummy_ctype(object): def __init__(self, cls): self._cls = cls def __mul__(self, other): return self def __call__(self, *other): return self._cls(other) def __eq__(self, other): return self._cls == other._cls def __ne__(self, other): return self._cls != other._cls def _getintp_ctype(): val = _getintp_ctype.cache if val is not None: return val if ctypes is None: import numpy as np val = dummy_ctype(np.intp) else: char = dtype('p').char if (char == 'i'): val = ctypes.c_int elif char == 'l': val = ctypes.c_long elif char == 'q': val = ctypes.c_longlong else: val = ctypes.c_long _getintp_ctype.cache = val 
return val _getintp_ctype.cache = None # Used for .ctypes attribute of ndarray class _missing_ctypes(object): def cast(self, num, obj): return num def c_void_p(self, num): return num class _ctypes(object): def __init__(self, array, ptr=None): if ctypes: self._ctypes = ctypes else: self._ctypes = _missing_ctypes() self._arr = array self._data = ptr if self._arr.ndim == 0: self._zerod = True else: self._zerod = False def data_as(self, obj): return self._ctypes.cast(self._data, obj) def shape_as(self, obj): if self._zerod: return None return (obj*self._arr.ndim)(*self._arr.shape) def strides_as(self, obj): if self._zerod: return None return (obj*self._arr.ndim)(*self._arr.strides) def get_data(self): return self._data def get_shape(self): return self.shape_as(_getintp_ctype()) def get_strides(self): return self.strides_as(_getintp_ctype()) def get_as_parameter(self): return self._ctypes.c_void_p(self._data) data = property(get_data, None, doc="c-types data") shape = property(get_shape, None, doc="c-types shape") strides = property(get_strides, None, doc="c-types strides") _as_parameter_ = property(get_as_parameter, None, doc="_as parameter_") def _newnames(datatype, order): """ Given a datatype and an order object, return a new names tuple, with the order indicated """ oldnames = datatype.names nameslist = list(oldnames) if isinstance(order, str): order = [order] seen = set() if isinstance(order, (list, tuple)): for name in order: try: nameslist.remove(name) except ValueError: if name in seen: raise ValueError("duplicate field name: %s" % (name,)) else: raise ValueError("unknown field name: %s" % (name,)) seen.add(name) return tuple(list(order) + nameslist) raise ValueError("unsupported order value: %s" % (order,)) def _copy_fields(ary): """Return copy of structured array with padding between fields removed. 
Parameters ---------- ary : ndarray Structured array from which to remove padding bytes Returns ------- ary_copy : ndarray Copy of ary with padding bytes removed """ dt = ary.dtype copy_dtype = {'names': dt.names, 'formats': [dt.fields[name][0] for name in dt.names]} return array(ary, dtype=copy_dtype, copy=True) def _getfield_is_safe(oldtype, newtype, offset): """ Checks safety of getfield for object arrays. As in _view_is_safe, we need to check that memory containing objects is not reinterpreted as a non-object datatype and vice versa. Parameters ---------- oldtype : data-type Data type of the original ndarray. newtype : data-type Data type of the field being accessed by ndarray.getfield offset : int Offset of the field being accessed by ndarray.getfield Raises ------ TypeError If the field access is invalid """ if newtype.hasobject or oldtype.hasobject: if offset == 0 and newtype == oldtype: return if oldtype.names: for name in oldtype.names: if (oldtype.fields[name][1] == offset and oldtype.fields[name][0] == newtype): return raise TypeError("Cannot get/set field of an object array") return def _view_is_safe(oldtype, newtype): """ Checks safety of a view involving object arrays, for example when doing:: np.zeros(10, dtype=oldtype).view(newtype) Parameters ---------- oldtype : data-type Data type of original ndarray newtype : data-type Data type of the view Raises ------ TypeError If the new type is incompatible with the old type. """ # if the types are equivalent, there is no problem. 
# for example: dtype((np.record, 'i4,i4')) == dtype((np.void, 'i4,i4')) if oldtype == newtype: return if newtype.hasobject or oldtype.hasobject: raise TypeError("Cannot change data-type for object array.") return # Given a string containing a PEP 3118 format specifier, # construct a NumPy dtype _pep3118_native_map = { '?': '?', 'c': 'S1', 'b': 'b', 'B': 'B', 'h': 'h', 'H': 'H', 'i': 'i', 'I': 'I', 'l': 'l', 'L': 'L', 'q': 'q', 'Q': 'Q', 'e': 'e', 'f': 'f', 'd': 'd', 'g': 'g', 'Zf': 'F', 'Zd': 'D', 'Zg': 'G', 's': 'S', 'w': 'U', 'O': 'O', 'x': 'V', # padding } _pep3118_native_typechars = ''.join(_pep3118_native_map.keys()) _pep3118_standard_map = { '?': '?', 'c': 'S1', 'b': 'b', 'B': 'B', 'h': 'i2', 'H': 'u2', 'i': 'i4', 'I': 'u4', 'l': 'i4', 'L': 'u4', 'q': 'i8', 'Q': 'u8', 'e': 'f2', 'f': 'f', 'd': 'd', 'Zf': 'F', 'Zd': 'D', 's': 'S', 'w': 'U', 'O': 'O', 'x': 'V', # padding } _pep3118_standard_typechars = ''.join(_pep3118_standard_map.keys()) def _dtype_from_pep3118(spec): class Stream(object): def __init__(self, s): self.s = s self.byteorder = '@' def advance(self, n): res = self.s[:n] self.s = self.s[n:] return res def consume(self, c): if self.s[:len(c)] == c: self.advance(len(c)) return True return False def consume_until(self, c): if callable(c): i = 0 while i < len(self.s) and not c(self.s[i]): i = i + 1 return self.advance(i) else: i = self.s.index(c) res = self.advance(i) self.advance(len(c)) return res @property def next(self): return self.s[0] def __bool__(self): return bool(self.s) __nonzero__ = __bool__ stream = Stream(spec) dtype, align = __dtype_from_pep3118(stream, is_subdtype=False) return dtype def __dtype_from_pep3118(stream, is_subdtype): field_spec = dict( names=[], formats=[], offsets=[], itemsize=0 ) offset = 0 common_alignment = 1 is_padding = False # Parse spec while stream: value = None # End of structure, bail out to upper level if stream.consume('}'): break # Sub-arrays (1) shape = None if stream.consume('('): shape = 
stream.consume_until(')') shape = tuple(map(int, shape.split(','))) # Byte order if stream.next in ('@', '=', '<', '>', '^', '!'): byteorder = stream.advance(1) if byteorder == '!': byteorder = '>' stream.byteorder = byteorder # Byte order characters also control native vs. standard type sizes if stream.byteorder in ('@', '^'): type_map = _pep3118_native_map type_map_chars = _pep3118_native_typechars else: type_map = _pep3118_standard_map type_map_chars = _pep3118_standard_typechars # Item sizes itemsize_str = stream.consume_until(lambda c: not c.isdigit()) if itemsize_str: itemsize = int(itemsize_str) else: itemsize = 1 # Data types is_padding = False if stream.consume('T{'): value, align = __dtype_from_pep3118( stream, is_subdtype=True) elif stream.next in type_map_chars: if stream.next == 'Z': typechar = stream.advance(2) else: typechar = stream.advance(1) is_padding = (typechar == 'x') dtypechar = type_map[typechar] if dtypechar in 'USV': dtypechar += '%d' % itemsize itemsize = 1 numpy_byteorder = {'@': '=', '^': '='}.get( stream.byteorder, stream.byteorder) value = dtype(numpy_byteorder + dtypechar) align = value.alignment else: raise ValueError("Unknown PEP 3118 data type specifier %r" % stream.s) # # Native alignment may require padding # # Here we assume that the presence of a '@' character implicitly implies # that the start of the array is *already* aligned. 
# extra_offset = 0 if stream.byteorder == '@': start_padding = (-offset) % align intra_padding = (-value.itemsize) % align offset += start_padding if intra_padding != 0: if itemsize > 1 or (shape is not None and _prod(shape) > 1): # Inject internal padding to the end of the sub-item value = _add_trailing_padding(value, intra_padding) else: # We can postpone the injection of internal padding, # as the item appears at most once extra_offset += intra_padding # Update common alignment common_alignment = _lcm(align, common_alignment) # Convert itemsize to sub-array if itemsize != 1: value = dtype((value, (itemsize,))) # Sub-arrays (2) if shape is not None: value = dtype((value, shape)) # Field name if stream.consume(':'): name = stream.consume_until(':') else: name = None if not (is_padding and name is None): if name is not None and name in field_spec['names']: raise RuntimeError("Duplicate field name '%s' in PEP3118 format" % name) field_spec['names'].append(name) field_spec['formats'].append(value) field_spec['offsets'].append(offset) offset += value.itemsize offset += extra_offset field_spec['itemsize'] = offset # extra final padding for aligned types if stream.byteorder == '@': field_spec['itemsize'] += (-offset) % common_alignment # Check if this was a simple 1-item type, and unwrap it if (field_spec['names'] == [None] and field_spec['offsets'][0] == 0 and field_spec['itemsize'] == field_spec['formats'][0].itemsize and not is_subdtype): ret = field_spec['formats'][0] else: _fix_names(field_spec) ret = dtype(field_spec) # Finished return ret, common_alignment def _fix_names(field_spec): """ Replace names which are None with the next unused f%d name """ names = field_spec['names'] for i, name in enumerate(names): if name is not None: continue j = 0 while True: name = 'f{}'.format(j) if name not in names: break j = j + 1 names[i] = name def _add_trailing_padding(value, padding): """Inject the specified number of padding bytes at the end of a dtype""" if value.fields 
is None: field_spec = dict( names=['f0'], formats=[value], offsets=[0], itemsize=value.itemsize ) else: fields = value.fields names = value.names field_spec = dict( names=names, formats=[fields[name][0] for name in names], offsets=[fields[name][1] for name in names], itemsize=value.itemsize ) field_spec['itemsize'] += padding return dtype(field_spec) def _prod(a): p = 1 for x in a: p *= x return p def _gcd(a, b): """Calculate the greatest common divisor of a and b""" while b: a, b = b, a % b return a def _lcm(a, b): return a // _gcd(a, b) * b # Exception used in shares_memory() class TooHardError(RuntimeError): pass class AxisError(ValueError, IndexError): """ Axis supplied was invalid. """ def __init__(self, axis, ndim=None, msg_prefix=None): # single-argument form just delegates to base class if ndim is None and msg_prefix is None: msg = axis # do the string formatting here, to save work in the C code else: msg = ("axis {} is out of bounds for array of dimension {}" .format(axis, ndim)) if msg_prefix is not None: msg = "{}: {}".format(msg_prefix, msg) super(AxisError, self).__init__(msg) def array_ufunc_errmsg_formatter(dummy, ufunc, method, *inputs, **kwargs): """ Format the error message for when __array_ufunc__ gives up. 
""" args_string = ', '.join(['{!r}'.format(arg) for arg in inputs] + ['{}={!r}'.format(k, v) for k, v in kwargs.items()]) args = inputs + kwargs.get('out', ()) types_string = ', '.join(repr(type(arg).__name__) for arg in args) return ('operand type(s) all returned NotImplemented from ' '__array_ufunc__({!r}, {!r}, {}): {}' .format(ufunc, method, args_string, types_string)) def _ufunc_doc_signature_formatter(ufunc): """ Builds a signature string which resembles PEP 457 This is used to construct the first line of the docstring """ # input arguments are simple if ufunc.nin == 1: in_args = 'x' else: in_args = ', '.join('x{}'.format(i+1) for i in range(ufunc.nin)) # output arguments are both keyword or positional if ufunc.nout == 0: out_args = ', /, out=()' elif ufunc.nout == 1: out_args = ', /, out=None' else: out_args = '[, {positional}], / [, out={default}]'.format( positional=', '.join( 'out{}'.format(i+1) for i in range(ufunc.nout)), default=repr((None,)*ufunc.nout) ) # keyword only args depend on whether this is a gufunc kwargs = ( ", casting='same_kind'" ", order='K'" ", dtype=None" ", subok=True" "[, signature" ", extobj]" ) if ufunc.signature is None: kwargs = ", where=True" + kwargs # join all the parts together return '{name}({in_args}{out_args}, *{kwargs})'.format( name=ufunc.__name__, in_args=in_args, out_args=out_args, kwargs=kwargs )
mit
-6,998,548,371,938,173,000
27.698939
82
0.526411
false
3.833304
false
false
false
Taifxx/xxtrep
context.addtolib/resources/lib/ext/base/tags.py
1
15746
# -*- coding: utf-8 -*- # # Copyright (C) 2011-2014 Martijn Kaijser # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ########## DEFINE TAGS: #### System param's ... ### Library folder name ... TAG_PAR_LIB_FOLDER = 'LIB' ### TMP folders names ... TAG_PAR_TMP = 'TMP' TAG_PAR_TMPA = 'TMPA' ### Addon ... TAG_PAR_SCRIPT_ID = 'context.addtolib' TAG_PAR_SERVICE_PY = 'service.py' TAG_PAR_ADDON_PY = 'context.py' TAG_PAR_COLORS_FILE = 'colors' ### Addon folders ... TAG_PAR_RESFOLDER = 'resources' TAG_PAR_BSFOLDER = 'bs' TAG_PAR_SKINSFOLDER = [TAG_PAR_RESFOLDER,'skins'] TAG_PAR_SPLASH_FILE = [TAG_PAR_RESFOLDER, TAG_PAR_BSFOLDER, 'splash.mp4'] ### RunScript's ... TAG_PAR_SERVICE = 'special://home/addons/%s/%s' % (TAG_PAR_SCRIPT_ID, TAG_PAR_SERVICE_PY) TAG_PAR_ADDON = 'special://home/addons/%s/%s' % (TAG_PAR_SCRIPT_ID, TAG_PAR_ADDON_PY) ### Strinsg XML (as default) ... TAG_PAR_STRINGSXML_PATH = [TAG_PAR_RESFOLDER,'language','english'] TAG_PAR_STRINGSXML_FILE = 'strings.xml' ### Dropbox API ... 
TAG_PAR_DROPBOX_LF = 'synclock' TAG_PAR_DROPBOX_LCODE = 'XX000000' TAG_PAR_DROPBOX_SYNC_FILE = 'vdbsync' TAG_PAR_DROPBOX_SYNC_T_FILE = 'vdbsync.tmp' TAG_PAR_DROPBOX_LI_FILE = 'libimg' TAG_PAR_DROPBOX_LI_T_FILE = 'libimg.tmp' TAG_PAR_DROPBOX_LI_S_FILE = 'libimg.sync' TAG_PAR_DROPBOX_CORR_FILE = 'corruption' TAG_PAR_DROPBOX_UID_FILE = 'uid' TAG_PAR_DROPBOX_SYNC_T_DIR = 'SYNC_TMP' TAG_PAR_DROPBOX_PATH = [TAG_PAR_RESFOLDER,'lib','dropbox'] TAG_PAR_DBXACCESSTOKEN_FILE = 'dropbox_access_token' TAG_PAR_DROPBOX_LISEPREC = '\n' TAG_PAR_DROPBOX_LISEPTM = '<**DBXTM**>' TAG_PAR_DROPBOX_MSGSEP = '#' TAG_PAR_DBXAPPKEY = 'cxa8c253kvoqbqd' TAG_PAR_DBXAPPSECRET = 'n7tx9emzji3aqnh' ### Addon work files ... TAG_PAR_TVSPACK_FILE = 'tvs.pack' TAG_PAR_TVSRAWFILE = 'tvs.eraw' TAG_PAR_STL_FILE = 'linktable' TAG_PAR_FSET_FILE = 'fset' TAG_PAR_PTYPETABLE_FILE = 'pttable' ### Addon work files (tmp) ... TAG_PAR_TVSUPD_FILE = 'tvsupd' TAG_PAR_TVSUPDNOW_FILE = 'updnow' #TAG_PAR_LOCKF = 'lock' TAG_PAR_STRARTF = 'lock_started' #TAG_PAR_STRARTAF = 'act' TAG_PAR_LAACTT = 'laactt' TAG_PAR_WS_FILE = 'watchsync' TAG_PAR_WS_TMP_FILE = 'watchsync.tmp' ### Video extensions ... TAG_PAR_VIDEOSEXT = ['.avi', '.mpeg', '.wmv', 'asf', '.flv', '.mkv', '.mka', '.mp4', '.m4a', '.aac', '.ogg', '.ogm', '.ram', '.rm', '.rv', '.ra', '.rmvb', '.3gp'] ### Backup files template ... TAG_PAR_SYSFLSTMPL = ['.strm', TAG_PAR_TVSPACK_FILE, TAG_PAR_TVSRAWFILE, TAG_PAR_STL_FILE, TAG_PAR_FSET_FILE, TAG_PAR_PTYPETABLE_FILE, TAG_PAR_TVSUPD_FILE, TAG_PAR_TVSUPDNOW_FILE, TAG_PAR_STRARTF, TAG_PAR_DROPBOX_SYNC_FILE, TAG_PAR_DBXACCESSTOKEN_FILE] TAG_PAR_DROPBOX_TMPL = ['.strm', TAG_PAR_TVSPACK_FILE, TAG_PAR_TVSRAWFILE, TAG_PAR_STL_FILE] ### Default tmpl ... 
TAG_PAR_TVSDEFSEASON = '01' TAG_PAR_SETDEF = 'Default' TAG_PAR_MNUCOLORFORMAT = '[COLOR %s]%s[/COLOR]' TAG_PAR_COLORTAG = '##COLOR##' TAG_PAR_ADDONLABEL_TMPL = '<string id="29999">%s</string>' TAG_PAR_ADDONLABEL_PATT = TAG_PAR_ADDONLABEL_TMPL % ('(.*)') TAG_PAR_ADDONLABEL = TAG_PAR_ADDONLABEL_TMPL % ('ADD to [COLOR %s]Lib[/COLOR]') TAG_PAR_LNPAGE = ' - (%s/%s)' TAG_PAR_LNSEP = ' > ' TAG_PAR_TTLQ = '%s ( %s ):' ### Zip ... TAG_PAR_ZIPCN = 'CN' TAG_PAR_ZIPST = 'atl.backup.' TAG_PAR_ZIPTMPL = TAG_PAR_ZIPST + '%s.%s.'+ TAG_PAR_ZIPCN + '.zip' ### XML TAG_PAR_XMLW_SELDLG = 'XDialogSelect.xml' TAG_PAR_XMLW_SELDLGSUB = 'XDialogSelectSub.xml' TAG_PAR_XMLW_OKDLG = 'XDialogOk.xml' TAG_PAR_XMLW_YESNODLG = 'XDialogYesNo.xml' TAG_PAR_XMLW_RESUMEDLG = 'XDialogResume.xml' TAG_PAR_XMLW_NOWPLAYDLG = 'XDialogNowPlay.xml' TAG_PAR_XMLW_DROPBOX = 'Dropbox.xml' ### Help ... TAG_PAG_HELPXML = 'DialogHelp.xml' TAG_PAR_HELPFILE = 'help.' TAG_PAR_HELPPATH = [TAG_PAR_RESFOLDER, 'help'] ### Time ... TAG_PAR_TIMENUMFORMAT = '{:0>2}' TAG_PAR_TIMESEP = ':' ### URL ... TAG_PAR_CALLURLTMPL = 'plugin://%s//?#strmtype=#%s&#strmfile=#%s&#strmurl=#' TAG_PAR_REPFN = '%s' TAG_PAR_ACTION = 'action=' TAG_PAR_IGNOREST = 'ignorestarted' ### tvs.pack separators ... TAG_PAR_TVSPACK_LSEP = '<**LSTSEP**>' TAG_PAR_TVSPACK_SSEP = '<**SRCSEP**>' TAG_PAR_TVSPACK_FSEP = '<**FRCSEP**>' TAG_PAR_TVSPACK_ESEP = '<**EPSSEP**>' TAG_PAR_TVSPACK_PSEP = '<**PRTSEP**>' TAG_PAR_TVSPACK_VERSEP = '<**VERSIONSEP**>' TAG_PAR_TVSPACK_VERSION = '10015' ### Containers starts with ... TAG_CON_STARTSW_EXT = 'plugin:' TAG_CON_STARTSW_VID = 'videodb:' TAG_CON_STARTSW_PVD = 'playlistvideo:' #### Const Tags ... ### Default ... DEFAULT = 10000 ### Types ... TAG_TYP_ALL = 10001 TAG_TYP_MOV = 10002 TAG_TYP_TVS = 10003 TAG_TYP_SRC = 10004 TAG_TYP_FOLDER = 10005 TAG_TYP_PREFILE = 10006 TAG_TYP_FILE = 10007 ### Containers ... TAG_CON_LOCAL = 10071 TAG_CON_EXT = 10072 TAG_CON_VID = 10073 TAG_CON_PVD = 10074 ### Condidions ... 
TAG_CND_FOUND = 10075 TAG_CND_NOTFOUND = 10076 TAG_CND_LISTEMPTY = 10077 TAG_CND_NEWSRC = 10078 TAG_CND_OLDSRC = 10079 TAG_CND_NOUPD = 10080 TAG_CND_NEWFRC = 10081 TAG_CND_OLDFRC = 10082 TAG_CND_UPDPRC = 10083 TAG_CND_NOUPDPRC = 10084 TAG_CND_NOGL = 10085 TAG_CND_NOACTION = 10086 TAG_CND_PLAY = 10087 TAG_CND_DBXNOAUTH = 10088 TAG_CND_NOTISMOV = 10089 TAG_CND_ISMOV = 10090 ### Free actions ... TAG_ACT_LPRESET = 10200 TAG_ACT_SHADOWUPD = 10201 TAG_ACT_DONOTHING = 10202 TAG_ACT_CHCOLOR = 10203 TAG_ACT_RENAMER = 10204 TAG_ACT_BACKUP = 10205 TAG_ACT_REMBACK = 10206 TAG_ACT_RESTBACK = 10207 TAG_ACT_RESETTBU = 10208 TAG_ACT_AUTOBACKUP = 10209 TAG_ACT_RESKIN = 10210 TAG_ACT_DBXCONNECT = 10211 TAG_ACT_DBXDISCONNECT = 10212 TAG_ACT_SYNC = 10213 TAG_ACT_WATCHSYNC = 10214 TAG_ACT_STOPSRV = 10215 TAG_ACT_STARTSRV = 10216 #### Strings Tags ... ### Language ... TAG_LNG_ID = 30000 ### Menue ... TAG_MNU_MOV = 30001 TAG_MNU_TVS = 30002 TAG_MNU_TVSU = 30003 TAG_MNU_OPEN = 30004 TAG_MNU_RESCAN = 30005 TAG_MNU_REMSRC = 30006 TAG_MNU_RESTORE = 30007 TAG_MNU_DELETE = 30008 TAG_MNU_VIDLIBU = 30009 TAG_MNU_CHKNEW = 30010 TAG_MNU_JOIN = 30011 TAG_MNU_TVSREN = 30012 TAG_MNU_SRCREN = 30013 TAG_MNU_UPDMAN = 30014 TAG_MNU_ADDEXIST = 30015 TAG_MNU_ADDNEW = 30016 TAG_MNU_SM = 30017 TAG_MNU_SHOWALL = 30018 TAG_MNU_SRCMAN = 30019 TAG_MNU_TVSMAN = 30020 TAG_MNU_QR = 30021 TAG_MNU_QL = 30022 TAG_MNU_NEW = 30023 TAG_MNU_ADDFOL = 30024 TAG_MNU_SRE = 30025 TAG_MNU_UPDFOL = 30026 TAG_MNU_VIDLIBCLN = 30027 TAG_MNU_SHDIR = 30028 TAG_MNU_REBSTL = 30029 TAG_MNU_DEFNMMOV = 30030 TAG_MNU_NEWNMMOV = 30031 TAG_MNU_ATVSNM = 30032 TAG_MNU_ATVSNUMT = 30033 TAG_MNU_ATVSNUM = 30034 TAG_MNU_DEFNM = 30035 TAG_MNU_SEQNUM = 30036 TAG_MNU_SEANUM = 30037 TAG_MNU_STARTADD = 30038 TAG_MNU_ATVS = 30039 TAG_MNU_ATVSSERT = 30040 TAG_MNU_SERDEF = 30041 TAG_MNU_SERTPL = 30042 TAG_MNU_SEASON = 30043 TAG_MNU_RFROM = 30044 TAG_MNU_SFRBEGIN = 30045 TAG_MNU_ADVADD = 30046 TAG_MNU_CHKNEWGL = 30047 TAG_MNU_RESTOREALL = 30048 TAG_MNU_SMM 
= 30049 TAG_MNU_RAWADD = 30050 TAG_MNU_BRWSREN = 30051 TAG_MNU_CONTUPD = 30052 TAG_MNU_RESCANALLS = 30053 TAG_MNU_RESCANFULL = 30054 TAG_MNU_YES = 30055 TAG_MNU_NO = 30056 TAG_MNU_CLOSEDLG = 30057 TAG_MNU_ADVLSORT = 30058 TAG_MNU_ADVLSORTDOWN = 30059 TAG_MNU_ADVLSORTUP = 30060 TAG_MNU_EPSLISTCORR = 30061 TAG_MNU_NUMBCORR = 30062 TAG_MNU_PBTYPES = 30063 TAG_MNU_DBSYNC = 30064 TAG_MNU_DELMOV = 30065 TAG_MNU_DELTVS = 30066 TAG_MNU_REMARKALL = 30067 TAG_MNU_TVSSTALN = 30068 TAG_MNU_FOLDMODE = 30069 ### Static mnu ... TAG_MNU_MORE = 30090 TAG_MNU_BACKMAIN = 30091 TAG_MNU_OK = 30092 TAG_MNU_HELP = 30096 TAG_MNU_SET = 30097 TAG_MNU_BACK = 30098 TAG_MNU_CANCEL = 30099 ### Confirms ... TAG_CFR_RESCAN = 30071 TAG_CFR_REMSRC = 30072 TAG_CFR_RESTORE = 30073 TAG_CFR_DELETE = 30074 TAG_CFR_TVSREN = 30075 TAG_CFR_JOIN = 30076 TAG_CFR_CLEANVL = 30077 TAG_CFR_DEFNM = 30078 TAG_CFR_RESTOREALL = 30079 TAG_CFR_RESCANALLS = 30080 TAG_CFR_RESCANFULL = 30081 TAG_CFR_RENAMER = 30082 TAG_CFR_UNLOCK = 30083 TAG_CFR_REMBACK = 30084 TAG_CFR_RESTBACK = 30085 TAG_CFR_EXCLPLUG = 30086 ### Dialogs messages ... 
TAG_DLG_OK = 30100 TAG_DLG_NX = 30101 TAG_DLG_PR = 30102 TAG_DLG_INNM = 30103 TAG_DLG_INSE = 30104 TAG_DLG_NUMSKIP = 30105 TAG_DLG_SUPPRES = 30106 TAG_DLG_PBT1 = 30107 TAG_DLG_PBT2 = 30108 TAG_DLG_PBTAD1 = 30109 TAG_DLG_PBTAD2 = 30110 TAG_DLG_PBTADTIMEO = 30111 TAG_DLG_PBTADTCLAS = 30112 TAG_DLG_PBTADTISP = 30113 TAG_DLG_PBTADTFOLD = 30114 TAG_DLG_PBTT1 = 30115 TAG_DLG_PBTT2 = 30116 TAG_DLG_PBTT3 = 30117 TAG_DLG_PBTT4 = 30118 TAG_DLG_PBTT5 = 30119 TAG_DLG_PBTALT = 30120 TAG_DLG_PBTREM = 30121 TAG_DLG_NPINFO = 30122 TAG_DLG_NPINFRAT = 30123 TAG_DLG_NPINFSRC = 30124 TAG_DLG_NPINFPBT = 30125 TAG_DLG_NPDIRL = 30126 TAG_DLG_PBTTRAN = 30127 TAG_DLG_PBTTRANI = 30128 TAG_DLG_DBXP1 = 30129 TAG_DLG_DBXP2 = 30130 TAG_DLG_DBXP3 = 30131 TAG_DLG_DBXP4 = 30132 TAG_DLG_DBXP5 = 30133 TAG_DLG_DBXPEC = 30134 TAG_DLG_DBXPRGSMSGS = 30135 TAG_DLG_CORR1 = 30136 TAG_DLG_CORR2 = 30137 TAG_DLG_CORR3 = 30138 TAG_DLG_CORR_FORCE = 30139 TAG_DLG_CORR_UNL = 30140 TAG_DLG_MOVIEDEL = 30141 TAG_DLG_TVSDEL = 30142 TAG_DLG_SCLNDB = 30143 TAG_DLG_SREMEF = 30144 TAG_DLG_LOCKSYQ = 30145 TAG_DLG_RENM = 30146 TAG_DLG_CURRTVS = 30147 TAG_DLG_EXCLADDON = 30148 ### Titles ... 
TAG_TTL_NM = 30150 TAG_TTL_ENTNAME = 30151 TAG_TTL_CHSNAME = 30152 TAG_TTL_ADDTVS = 30153 TAG_TTL_NEWEPS = 30154 TAG_TTL_EXITVS = 30155 TAG_TTL_CHKUPD = 30156 TAG_TTL_ADDMOV = 30157 TAG_TTL_ENTNAMEM = 30158 TAG_TTL_ADVADD = 30159 TAG_TTL_RESTOREALL = 30160 TAG_TTL_CHKUPDGL = 30161 TAG_TTL_POSHLP = 30162 TAG_TTL_CAST = 30163 TAG_TTL_BRWSREN = 30164 TAG_TTL_BRWSRENEP = 30165 TAG_TTL_COLORIZE = 30166 TAG_TTL_SEASON = 30167 TAG_TTL_BACKUP = 30168 TAG_TTL_RESTBACK = 30169 TAG_TTL_RESTLIB = 30170 TAG_TTL_RESTRL = 30171 TAG_TTL_RESTUL = 30172 TAG_TTL_RESTCHK = 30173 TAG_TTL_BCKNM = 30174 TAG_TTL_RESTAT = 30175 TAG_TTL_RESTATC = 30176 TAG_TTL_RESTRTMP = 30177 TAG_TTL_PACK = 30178 TAG_TTL_REMOLDBCK = 30179 TAG_TTL_CLRERRDT = 30180 TAG_TTL_CLRERRD = 30181 TAG_TTL_HELP = 30182 TAG_TTL_MAINMNU = 30183 TAG_TTL_RESKIN = 30184 TAG_TTL_RAWADDEPS = 30185 TAG_TTL_SYNCAUTO = 30186 TAG_TTL_SYNCUP = 30187 TAG_TTL_SYNCDOWN = 30188 TAG_TTL_SYNCUNLOCK = 30189 TAG_TTL_SYNCSENDCH = 30190 TAG_TTL_DBXTTL = 30191 TAG_TTL_DBXOK = 30192 TAG_TTL_DBXCANCEL = 30193 TAG_TTL_DBXCOPY = 30194 TAG_TTL_DBXKEYB = 30195 TAG_TTL_DBXPASTE = 30196 TAG_TTL_DBXOPEN = 30197 TAG_TTL_SVIDDB = 30198 TAG_TTL_SWS = 30199 TAG_TTL_LOCKSY = 30200 ### Set ... TAG_SET_RENAMER = 30436 ### Ok messages ... 
TAG_ERR_OK = 30301 TAG_ERR_OK_MOVADD = 30302 TAG_ERR_OK_TVSADD = 30303 TAG_ERR_OK_TVSUPD = 30304 TAG_ERR_OK_RESCAN = 30305 TAG_ERR_OK_RESTOR = 30306 TAG_ERR_OK_REMSRC = 30307 TAG_ERR_OK_DELETE = 30308 TAG_ERR_OK_CHKNEW = 30309 TAG_ERR_OK_TVSREN = 30310 TAG_ERR_OK_SRCREN = 30311 TAG_ERR_OK_JOIN = 30312 TAG_ERR_OK_ADDFOL = 30313 TAG_ERR_OK_UPDFOL = 30314 TAG_ERR_OK_SETUPD = 30315 TAG_ERR_OK_VIDLIBU = 30316 TAG_ERR_OK_REBSTL = 30317 TAG_ERR_OK_RESTOREALL = 30318 TAG_ERR_OK_BRWSREN = 30319 TAG_ERR_OK_NEWFRC = 30320 TAG_ERR_OK_RESCANALLS = 30321 TAG_ERR_OK_RESCANFULL = 30322 TAG_ERR_OK_RENAMER = 30323 TAG_ERR_OK_BACKUP = 30324 TAG_ERR_OK_REMBACK = 30325 TAG_ERR_OK_RESTBACK = 30326 TAG_ERR_OK_NOBACK = 30327 TAG_ERR_OK_DBXSMAC = 30328 TAG_ERR_OK_DBXSMDL = 30329 TAG_ERR_OK_DBXSMUP = 30330 TAG_ERR_OK_DBXWSMAC = 30331 TAG_ERR_OK_DBXWSMDL = 30332 TAG_ERR_OK_DBXWSMUP = 30333 TAG_ERR_OK_SYNCUNLOCK = 30334 TAG_ERR_OK_MTVSDEL = 30335 TAG_ERR_OK_SYNCLOCK = 30336 TAG_ERR_OK_EPSREM = 30337 TAG_ERR_OK_EXCLUPLUG = 30338 ### Errors ... TAG_ERR_NOTFILE = 30201 TAG_ERR_INCINPUT = 30202 TAG_ERR_LISTEMPTY = 30203 TAG_ERR_ABORT = 30204 TAG_ERR_NOTOJOIN = 30205 TAG_ERR_DEDLINK = 30206 TAG_ERR_NONAME = 30207 TAG_ERR_NONAME2 = 30208 TAG_ERR_DEFEPS = 30209 TAG_ERR_BROKENLINK = 30210 TAG_ERR_BROKENLINK2 = 30211 TAG_ERR_LIB = 30212 TAG_ERR_LIBACT = 30213 TAG_ERR_LOCK = 30214 TAG_ERR_OL = 30215 TAG_ERR_BADZIP = 30216 TAG_ERR_NOBCKPATH = 30217 TAG_ERR_NOBCKPATHM = 30218 TAG_ERR_INCPBTYPE = 30219 TAG_ERR_NODBXCONNECT = 30220 TAG_ERR_DBXISLOCK = 30221 TAG_ERR_DBXRAISE = 30222 ### Other ... TAG_SET_RUN = 30479 TAG_SET_STOP = 30480
gpl-3.0
-7,829,866,278,117,454,000
31.262295
261
0.560658
false
2.624875
false
false
false
yingcuhk/LeetCode
Algorithms/#303 Range Sum Query - Immutable/PythonCode.py
1
1082
""" Given an integer array nums, find the sum of the elements between indices i and j (i ¡Ü j), inclusive. Example: Given nums = [-2, 0, 3, -5, 2, -1] sumRange(0, 2) -> 1 sumRange(2, 5) -> -1 sumRange(0, 5) -> -3 Note: You may assume that the array does not change. There are many calls to sumRange function. """ class NumArray(object): def __init__(self, nums): """ initialize your data structure here. :type nums: List[int] """ #self.nums = nums L = len(nums) CumSum = [0 for i in xrange(L+1)] for i in range(1,L+1): CumSum[i] = CumSum[i-1]+nums[i-1] #print CumSum self.CumSum = CumSum def sumRange(self, i, j): """ sum of elements nums[i..j], inclusive. :type i: int :type j: int :rtype: int """ return self.CumSum[j+1] - self.CumSum[i] # Your NumArray object will be instantiated and called as such: # numArray = NumArray(nums) # numArray.sumRange(0, 1) # numArray.sumRange(1, 2)
mit
6,948,932,555,707,017,000
22.042553
102
0.550832
false
3.145349
false
false
false
vascotenner/holoviews
holoviews/plotting/mpl/annotation.py
1
3913
import matplotlib from matplotlib import patches as patches from ...core.util import match_spec from ...core.options import abbreviated_exception from .element import ElementPlot class AnnotationPlot(ElementPlot): """ AnnotationPlot handles the display of all annotation elements. """ def __init__(self, annotation, **params): self._annotation = annotation super(AnnotationPlot, self).__init__(annotation, **params) self.handles['annotations'] = [] def initialize_plot(self, ranges=None): annotation = self.hmap.last key = self.keys[-1] ranges = self.compute_ranges(self.hmap, key, ranges) ranges = match_spec(annotation, ranges) axis = self.handles['axis'] opts = self.style[self.cyclic_index] with abbreviated_exception(): handles = self.draw_annotation(axis, annotation.data, opts) self.handles['annotations'] = handles return self._finalize_axis(key, ranges=ranges) def update_handles(self, key, axis, annotation, ranges, style): # Clear all existing annotations for element in self.handles['annotations']: element.remove() with abbreviated_exception(): self.handles['annotations'] = self.draw_annotation(axis, annotation.data, style) class VLinePlot(AnnotationPlot): "Draw a vertical line on the axis" style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, position, opts): return [axis.axvline(position, **opts)] class HLinePlot(AnnotationPlot): "Draw a horizontal line on the axis" style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, position, opts): "Draw a horizontal line on the axis" return [axis.axhline(position, **opts)] class TextPlot(AnnotationPlot): "Draw the Text annotation object" style_opts = ['alpha', 'color', 'family', 'weight', 'rotation', 'fontsize', 'visible'] def draw_annotation(self, axis, data, opts): (x,y, text, fontsize, horizontalalignment, verticalalignment, rotation) = data opts['fontsize'] = fontsize return [axis.text(x,y, text, horizontalalignment = 
horizontalalignment, verticalalignment = verticalalignment, rotation=rotation, **opts)] class ArrowPlot(AnnotationPlot): "Draw an arrow using the information supplied to the Arrow annotation" _arrow_style_opts = ['alpha', 'color', 'lw', 'linewidth', 'visible'] _text_style_opts = TextPlot.style_opts style_opts = sorted(set(_arrow_style_opts + _text_style_opts)) def draw_annotation(self, axis, data, opts): direction, text, xy, points, arrowstyle = data arrowprops = dict({'arrowstyle':arrowstyle}, **{k: opts[k] for k in self._arrow_style_opts if k in opts}) textopts = {k: opts[k] for k in self._text_style_opts if k in opts} if direction in ['v', '^']: xytext = (0, points if direction=='v' else -points) elif direction in ['>', '<']: xytext = (points if direction=='<' else -points, 0) return [axis.annotate(text, xy=xy, textcoords='offset points', xytext=xytext, ha="center", va="center", arrowprops=arrowprops, **textopts)] class SplinePlot(AnnotationPlot): "Draw the supplied Spline annotation (see Spline docstring)" style_opts = ['alpha', 'edgecolor', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, data, opts): verts, codes = data patch = patches.PathPatch(matplotlib.path.Path(verts, codes), facecolor='none', **opts) axis.add_patch(patch) return [patch]
bsd-3-clause
-3,434,174,948,459,445,000
34.899083
92
0.620751
false
4.131996
false
false
false
Dacelonid/gerrymander
gerrymander/reports.py
1
49794
# # Copyright (C) 2014 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import prettytable import logging import time import re import json import sys import xml.dom.minidom from gerrymander.operations import OperationQuery from gerrymander.model import ModelApproval from gerrymander.format import format_date from gerrymander.format import format_delta from gerrymander.format import format_title from gerrymander.format import format_color LOG = logging.getLogger(__name__) class ReportOutputColumn(object): ALIGN_LEFT = "l" ALIGN_RIGHT = "r" ALIGN_CENTER = "c" def __init__(self, key, label, mapfunc, sortfunc=None, format=None, truncate=0, align=ALIGN_LEFT, visible=True): self.key = key self.label = label self.mapfunc = mapfunc self.sortfunc = sortfunc self.format = format self.truncate = truncate self.align = align self.visible = visible def get_value(self, report, row): val = self.mapfunc(report, self.key, row) if self.format is not None: val = self.format % val elif val is None: val = "" if type(val) != str: val = val.encode('utf-8') if self.truncate and len(val) > self.truncate: val = val[0:self.truncate] + "..." 
return val def get_sort_value(self, report, row): if self.sortfunc: return self.sortfunc(report, self.key, row) else: return self.mapfunc(report, self.key, row) class ReportOutput(object): DISPLAY_MODE_TEXT = "text" DISPLAY_MODE_CSV = "csv" DISPLAY_MODE_XML = "xml" DISPLAY_MODE_JSON = "json" def __init__(self, usecolor=False): super(ReportOutput, self).__init__() self.usecolor = usecolor def display(self, mode, stream=sys.stdout): if mode == ReportOutput.DISPLAY_MODE_TEXT: stream.write(self.to_text()) elif mode == ReportOutput.DISPLAY_MODE_CSV: stream.write(self.to_csv()) elif mode == ReportOutput.DISPLAY_MODE_XML: impl = xml.dom.minidom.getDOMImplementation() doc = impl.createDocument(None, "report", None) self.to_xml(doc, doc.documentElement) stream.write(doc.toprettyxml()) elif mode == ReportOutput.DISPLAY_MODE_JSON: doc = [] self.to_json(doc) stream.write(json.dumps(doc, indent=2) + "\n") else: raise Exception("Unknown display mode '%s'" % mode) def to_text(self): raise NotImplementedError("Subclass should implement the 'to_text' method") def to_csv(self): raise NotImplementedError("Subclass should implement the 'to_csv' method") def to_xml(self, root): raise NotImplementedError("Subclass should implement the 'to_xml' method") def to_json(self, root): raise NotImplementedError("Subclass should implement the 'to_json' method") class ReportOutputCompound(ReportOutput): def __init__(self): self.report = [] def add_report(self, report): self.report.append(report) def to_text(self): blocks = [] for report in self.report: blocks.append(report.to_text()) return "\n".join(blocks) def to_json(self, root): for report in self.report: report.to_json(root) def to_xml(self, doc, root): for report in self.report: report.to_xml(doc, root) class ReportOutputList(ReportOutput): def __init__(self, columns, title=None, usecolor=False): super(ReportOutputList, self).__init__(usecolor) self.columns = columns self.row = {} self.title = title def set_row(self, row): self.row = row def 
to_xml(self, doc, root): lst = doc.createElement("list") root.appendChild(lst) if self.title is not None: title = doc.createElement("title") title.appendChild(doc.createTextNode(self.title)) lst.appendChild(title) headers = doc.createElement("headers") content = doc.createElement("content") lst.appendChild(headers) lst.appendChild(content) for col in self.columns: if col.visible: xmlcol = doc.createElement(col.key) xmlcol.appendChild(doc.createTextNode(col.label)) headers.appendChild(xmlcol) for col in self.columns: if col.visible: xmlfield = doc.createElement(col.key) xmlfield.appendChild(doc.createTextNode(col.get_value(self, self.row))) content.appendChild(xmlfield) def to_json(self, root): headers = {} for col in self.columns: if col.visible: headers[col.key] = col.label content = {} for col in self.columns: if col.visible: content[col.key] = col.get_value(self, self.row) node = { "list": { "headers": headers, "content": content } } if self.title is not None: node["list"]["title"] = self.title root.append(node) def to_text(self): labels = [] width = 1 for col in self.columns: if col.visible: if len(col.label) > width: width = len(col.label) labels.append(col.label) fmt = " %" + str(width) + "s: %s" lines = [] for col in self.columns: if col.visible: line = fmt % (col.label, col.get_value(self, self.row)) lines.append(line) prolog = "" if self.title is not None: prolog = format_title(self.title) + "\n" return prolog + "\n".join(lines) + "\n" class ReportOutputTable(ReportOutput): def __init__(self, columns, sortcol, reverse, limit, title=None, usecolor=False): super(ReportOutputTable, self).__init__(usecolor) self.columns = list(columns) self.rows = [] self.sortcol = sortcol self.reverse = reverse self.limit = limit self.title = title def add_column(self, col): self.columns.append(col) def add_row(self, row): self.rows.append(row) def sort_rows(self): sortcol = None for col in self.columns: if col.key == self.sortcol: sortcol = col if sortcol is not None: 
self.rows.sort(key = lambda item: sortcol.get_sort_value(self, item), reverse=self.reverse) def to_xml(self, doc, root): self.sort_rows() table = doc.createElement("table") root.appendChild(table) if self.title is not None: title = doc.createElement("title") title.appendChild(doc.createTextNode(self.title)) table.appendChild(title) headers = doc.createElement("headers") content = doc.createElement("content") table.appendChild(headers) table.appendChild(content) for col in self.columns: if col.visible: xmlcol = doc.createElement(col.key) xmlcol.appendChild(doc.createTextNode(col.label)) headers.appendChild(xmlcol) rows = self.rows if self.limit is not None: rows = rows[0:self.limit] for row in rows: xmlrow = doc.createElement("row") for col in self.columns: if col.visible: xmlfield = doc.createElement(col.key) xmlfield.appendChild(doc.createTextNode(col.get_value(self, row))) xmlrow.appendChild(xmlfield) content.appendChild(xmlrow) return doc def to_json(self, root): self.sort_rows() headers = {} for col in self.columns: if col.visible: headers[col.key] = col.label content = [] rows = self.rows if self.limit is not None: rows = rows[0:self.limit] for row in rows: data = {} for col in self.columns: if col.visible: data[col.key] = col.get_value(self, row) content.append(data) node = { "table": { "headers": headers, "content": content } } if self.title is not None: node["table"]["title"] = self.title root.append(node) def to_text(self): self.sort_rows() labels = [] for col in self.columns: if col.visible: labels.append(col.label) table = prettytable.PrettyTable(labels) for col in self.columns: table.align[col.label] = col.align table.padding_width = 1 rows = self.rows if self.limit is not None: rows = rows[0:self.limit] for row in rows: data = [] for col in self.columns: if col.visible: data.append(col.get_value(self, row)) table.add_row(data) prolog = "" if self.title is not None: prolog = format_title(self.title) + "\n" return prolog + str(table) + "\n" def 
to_csv(self): self.sort_rows() labels = [] for col in self.columns: if col.visible: labels.append(col.label) lines = [] if self.title is not None: lines.append(self.title) lines.append(",".join(labels)) rows = self.rows if self.limit is not None: rows = rows[0:self.limit] for row in rows: data = [] for col in self.columns: if col.visible: data.append(col.get_value(self, row)) lines.append(",".join(data)) return "\n".join(lines) class Report(object): def __init__(self, client): self.client = client def generate(self): raise NotImplementedError("Subclass must override generate method") def display(self, mode): output = self.generate() output.display(mode) class ReportTable(Report): def __init__(self, client, columns, sort=None, reverse=False): super(ReportTable, self).__init__(client) self.columns = columns self.limit = None self.set_sort_column(sort, reverse) def get_columns(self): return self.columns def get_column(self, key): for col in self.columns: if col.key == key: return col return None def has_column(self, key): col = self.get_column(key) if col is None: return False return True def set_sort_column(self, key, reverse=False): got = False for col in self.columns: if col.key == key: got = True if not got: raise Exception("Unknown sort column %s" % key) self.sort = key self.reverse = reverse def set_data_limit(self, limit): self.limit = limit def new_table(self, title=None): return ReportOutputTable(self.columns, self.sort, self.reverse, self.limit, title, self.usecolor) class ReportPatchReviewStats(ReportTable): def user_mapfunc(rep, col, row): return row[0] def team_mapfunc(rep, col, row): return row[2] def review_mapfunc(rep, col, row): return row[1]['total'] def ratio_mapfunc(rep, col, row): plus = float(row[1]['votes']['flag-p2'] + row[1]['votes']['flag-p1']) minus = float(row[1]['votes']['flag-m2'] + row[1]['votes']['flag-m1']) ratio = (plus / (plus + minus)) * 100 return ratio def vote_mapfunc(rep, col, row): return row[1]['votes'][col] COLUMNS = [ 
ReportOutputColumn("user", "User", user_mapfunc, align=ReportOutputColumn.ALIGN_LEFT), ReportOutputColumn("team", "Team", team_mapfunc, align=ReportOutputColumn.ALIGN_LEFT), ReportOutputColumn("reviews", "Reviews", review_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT), ReportOutputColumn("flag-m2", "-2", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT), ReportOutputColumn("flag-m1", "-1", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT), ReportOutputColumn("flag-p1", "+1", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT), ReportOutputColumn("flag-p2", "+2", vote_mapfunc, align=ReportOutputColumn.ALIGN_RIGHT), ReportOutputColumn("ratio", "+/-", ratio_mapfunc, format="%0.0lf%%", align=ReportOutputColumn.ALIGN_RIGHT), ] def __init__(self, client, projects, maxagedays=30, teams={}, usecolor=False): super(ReportPatchReviewStats, self).__init__(client, ReportPatchReviewStats.COLUMNS, sort="reviews", reverse=True) self.projects = projects self.teams = teams self.maxagedays = maxagedays self.usecolor = usecolor def generate(self): # We could query all projects at once, but if we do them # individually it means we get better hit rate against the # cache if the report is re-run for many different project # combinations reviews = [] cutoff = time.time() - (self.maxagedays * 24 * 60 * 60) for project in self.projects: query = OperationQuery(self.client, { "project": [project], }, patches=OperationQuery.PATCHES_ALL, approvals=True) def querycb(change): for patch in change.patches: for approval in patch.approvals: if approval.is_newer_than(cutoff): reviews.append(approval) query.run(querycb) reviewers = {} for review in reviews: if review.action != ModelApproval.ACTION_REVIEWED or review.user is None: continue reviewer = review.user.username if reviewer is None: reviewer = review.user.name if reviewer is None: continue if reviewer.lower() in ["jenkins", "smokestack"]: continue reviewers.setdefault(reviewer, { 'votes': {'flag-m2': 0, 'flag-m1': 0, 'flag-p1': 0, 
'flag-p2': 0}, 'total': 0, }) reviewers[reviewer]['total'] = reviewers[reviewer]['total'] + 1 votes = { "-2" : "flag-m2", "-1" : "flag-m1", "1" : "flag-p1", "2" : "flag-p2" } cur = reviewers[reviewer]['votes'][votes[str(review.value)]] reviewers[reviewer]['votes'][votes[str(review.value)]] = cur + 1 compound = ReportOutputCompound() table = self.new_table("Review statistics") compound.add_report(table) for user, votes in reviewers.items(): userteam = "" for team in self.teams.keys(): if user in self.teams[team]: userteam = team table.add_row([user, votes, userteam]) summary = ReportOutputList([ ReportOutputColumn("nreviews", "Total reviews", format="%d", mapfunc=lambda rep, col, row: row[0]), ReportOutputColumn("nreviewers", "Total rviewers", format="%d", mapfunc=lambda rep, col, row: row[1]) ], title="Review summary") summary.set_row([len(reviews), len(reviewers.keys())]) compound.add_report(summary) return compound class ReportPatchReviewRate(ReportTable): def user_mapfunc(rep, col, row): return row[0] def team_mapfunc(rep, col, row): return row[1] def week_mapfunc(rep, col, row): if col not in row[2]: return 0.0 return (row[2][col] / 7.0) def total_mapfunc(rep, col, row): if col not in row[2]: return 0.0 return (row[2][col] / (52.0 * 7.0)) COLUMNS = [ ReportOutputColumn("user", "User", user_mapfunc, align=ReportOutputColumn.ALIGN_LEFT), ReportOutputColumn("team", "Team", team_mapfunc, align=ReportOutputColumn.ALIGN_LEFT), ReportOutputColumn("total", "Total", total_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week1", "1 week", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week2", "2 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week3", "3 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week4", "4 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), 
ReportOutputColumn("week5", "5 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week6", "6 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week7", "7 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week8", "8 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week9", "9 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week10", "10 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week11", "11 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week12", "12 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week13", "13 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week14", "14 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week15", "15 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week16", "16 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week17", "17 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week18", "18 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week19", "19 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week20", "20 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week21", "21 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week22", "22 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week23", "23 weeks", 
week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week24", "24 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week25", "25 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week26", "26 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week27", "27 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week28", "28 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week29", "29 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week30", "30 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week31", "31 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week32", "32 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week33", "33 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week34", "34 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week35", "35 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week36", "36 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week37", "37 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week38", "38 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week39", "39 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week40", "40 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week41", "41 weeks", week_mapfunc, 
align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week42", "42 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week43", "43 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week44", "44 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week45", "45 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week46", "46 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week47", "47 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week48", "48 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week49", "49 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week50", "50 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week51", "51 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ReportOutputColumn("week52", "52 weeks", week_mapfunc, align=ReportOutputColumn.ALIGN_LEFT, format="%0.2f"), ] def __init__(self, client, projects, teams={}, usecolor=False): super(ReportPatchReviewRate, self).__init__(client, ReportPatchReviewRate.COLUMNS, sort="total", reverse=True) self.projects = projects self.teams = teams self.usecolor = usecolor def generate(self): # We could query all projects at once, but if we do them # individually it means we get better hit rate against the # cache if the report is re-run for many different project # combinations reviewers = {} now = time.time() for project in self.projects: query = OperationQuery(self.client, { "project": [project], }, patches=OperationQuery.PATCHES_ALL, approvals=True) def querycb(change): for patch in change.patches: for approval in patch.approvals: if approval.action == 
ModelApproval.ACTION_VERIFIED: continue user = approval.user if user is None or user.username is None: continue username = user.username if username not in reviewers: reviewers[username] = { "total": 0} agesecs = approval.get_age(now) ageweeks = int(agesecs / (60 * 60 * 24 * 7)) + 1 key = "week%d" % ageweeks if key not in reviewers[username]: reviewers[username][key] = 0 reviewers[username][key] = reviewers[username][key] + 1 if ageweeks <= 52: reviewers[username]["total"] = reviewers[username]["total"] + 1 query.run(querycb) table = self.new_table("Daily review rates per week") for reviewer in reviewers.keys(): userteam = "" for team in self.teams.keys(): if reviewer in self.teams[team]: userteam = team table.add_row([reviewer, userteam, reviewers[reviewer]]) return table class ReportBaseChange(ReportTable): @staticmethod def get_approval_votes(patch): # Yes, the numbers are slightly odd order # A +2 or -2 more important than any -1 or +1 # so we prefer them as the summary value levels = ["-2", "2", "-1", "1"] votes = { "c": { "total": collections.defaultdict(int), "list": [], "summary": "", "details": "", }, "v": { "total": collections.defaultdict(int), "list": [], "summary": "", "details": "", }, "w": { "total": collections.defaultdict(int), "list": [], "summary": "", "details": "", }, } for approval in patch.approvals: got_type = approval.action[0:1].lower() if got_type not in votes: continue vote = str(approval.value) votes[got_type]["total"][vote] = votes[got_type]["total"][vote] + 1 votes[got_type]["list"].append(vote) for key in votes.keys(): votes[key]["details"] = ",".join(votes[key]["list"]) vals = [] for level in levels: if level in votes[key]["total"]: votes[key]["summary"] = level break return votes def approvals_mapfunc(rep, col, row): patch = row.get_current_patch() if patch is None: LOG.error("No patch") return "" votes = ReportBaseChange.get_approval_votes(patch) keys = list(votes.keys()) keys.sort(reverse=True) data = " ".join(map(lambda val: 
"%s=%s" % (val, votes[val]["details"]), keys)) if rep.usecolor: if votes["w"]["total"]["1"] > 0: # Stuff pending merge return format_color(data, fg="blue", styles=["bold"]) elif votes["w"]["total"]["-1"] > 0: # Work-in-progress return format_color(data, fg="magenta", styles=[]) elif votes["c"]["total"]["-2"] > 0: # Hard-nack from core return format_color(data, fg="red", styles=["bold"]) elif votes["c"]["total"]["-1"] > 0 or votes["v"]["total"]["-1"] > 0: # Nack from any or bots return format_color(data, fg="red", styles=[]) elif votes["c"]["total"]["2"] > 0: # Approval from core return format_color(data, fg="green", styles=["bold"]) elif votes["c"]["total"]["1"] > 0: # Approval from any return format_color(data, fg="green", styles=[]) else: return data else: return data def votes_mapfunc(rep, col, row): patch = row.get_current_patch() if patch is None: LOG.error("No patch") return "" if col == "tests": coltype = "v" elif col == "reviews": coltype = "c" else: coltype = "w" votes = ReportBaseChange.get_approval_votes(patch) data = "%2s" % votes[coltype]["summary"] if rep.usecolor: if votes[coltype]["total"]["-2"] > 0: # Hard-nack from core return format_color(data, fg="red", styles=["bold"]) elif votes[coltype]["total"]["2"] > 0: # Approval from core return format_color(data, fg="green", styles=["bold"]) elif votes[coltype]["total"]["-1"] > 0: # Soft-nack from any return format_color(data, fg="red", styles=[]) elif votes[coltype]["total"]["1"] > 0: # Approval from any return format_color(data, fg="green", styles=[]) else: return data else: return data def user_mapfunc(rep, col, row): if not row.owner or not row.owner.username: return "<unknown>" return row.owner.username def date_mapfunc(rep, col, row): if col == "lastUpdated": return format_date(row.lastUpdated) else: return format_date(row.createdOn) def date_sortfunc(rep, col, row): if col == "lastUpdated": return row.lastUpdated else: return row.createdOn COLUMNS = [ ReportOutputColumn("status", "Status", lambda 
rep, col, row: row.status), ReportOutputColumn("topic", "Topic", lambda rep, col, row: row.topic, visible=False), ReportOutputColumn("url", "URL", lambda rep, col, row: row.url), ReportOutputColumn("owner", "Owner", user_mapfunc), ReportOutputColumn("project", "Project", lambda rep, col, row: row.project, visible=False), ReportOutputColumn("branch", "Branch", lambda rep, col, row: row.branch, visible=False), ReportOutputColumn("subject", "Subject", lambda rep, col, row: row.subject, truncate=30), ReportOutputColumn("createdOn", "Created", date_mapfunc, date_sortfunc), ReportOutputColumn("lastUpdated", "Updated", date_mapfunc, date_sortfunc), ReportOutputColumn("approvals", "Approvals", approvals_mapfunc, visible=False), ReportOutputColumn("tests", "Tests", votes_mapfunc), ReportOutputColumn("reviews", "Reviews", votes_mapfunc), ReportOutputColumn("workflow", "Workflow", votes_mapfunc), ] def __init__(self, client, usecolor=False): super(ReportBaseChange, self).__init__(client, ReportBaseChange.COLUMNS, sort="createdOn", reverse=False) self.usecolor = usecolor class ReportChanges(ReportBaseChange): def __init__(self, client, projects=[], owners=[], status=[], messages=[], branches=[], topics=[], reviewers=[], approvals=[], files=[], rawquery=None, usecolor=False): super(ReportChanges, self).__init__(client, usecolor) self.projects = projects self.owners = owners self.status = status self.messages = messages self.branches = branches self.topics = topics self.reviewers = reviewers self.approvals = approvals self.files = files self.rawquery = rawquery def generate(self): needFiles = False if len(self.files) > 0: needFiles = True query = OperationQuery(self.client, { "project": self.projects, "owner": self.owners, "message": self.messages, "branch": self.branches, "topic": self.topics, "status": self.status, "reviewer": self.reviewers, }, rawquery=self.rawquery, patches=OperationQuery.PATCHES_CURRENT, approvals=True, files=needFiles) def match_files(change): if 
len(self.files) == 0: return True for filere in self.files: for file in change.get_current_patch().files: if re.search(filere, file.path): return True return False table = self.new_table("Changes") def querycb(change): if match_files(change): table.add_row(change) query.run(querycb) return table class ReportToDoList(ReportBaseChange): def __init__(self, client, projects=[], branches=[], files=[], topics=[], reviewers=[], usecolor=False): super(ReportToDoList, self).__init__(client, usecolor) self.projects = projects self.branches = branches self.reviewers = reviewers self.files = files self.topics = topics def filter(self, change): return True def generate(self): needFiles = False if len(self.files) > 0: needFiles = True query = OperationQuery(self.client, { "project": self.projects, "status": [ OperationQuery.STATUS_OPEN ], "branch": self.branches, "topic": self.topics, "reviewer": self.reviewers, }, patches=OperationQuery.PATCHES_ALL, approvals=True, files=needFiles) def match_files(change): if len(self.files) == 0: return True for filere in self.files: for patch in change.patches: for file in patch.files: if re.search(filere, file.path): return True return False table = self.new_table("Changes To Do List") def querycb(change): if self.filter(change) and match_files(change): table.add_row(change) query.run(querycb) return table class ReportToDoListMine(ReportToDoList): def __init__(self, client, username, projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes 'username' has reviewed an older version of the patch, and needs to provide feedback on latest version ''' super(ReportToDoListMine, self).__init__(client, projects, reviewers=[ username ], branches=branches, files=files, topics=topics, usecolor=usecolor) self.username = username def filter(self, change): if (not change.has_current_reviewers([self.username]) and not change.has_owner([self.username])): return True return False class 
ReportToDoListOthers(ReportToDoList): def __init__(self, client, username, bots=[], projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes where 'username' has never reviewed, but at least one other non-bot user has provided review ''' super(ReportToDoListOthers, self).__init__(client, projects, reviewers=[ "!", username ], branches=branches, files=files, topics=topics, usecolor=usecolor) self.bots = bots def filter(self, change): # allchanges contains changes where 'username' has # not reviewed any version of the patch. We want to # filter out changes which only have bots, or have # no reviewers at all. if change.has_any_other_reviewers(self.bots): return True return False class ReportToDoListAnyones(ReportToDoList): def __init__(self, client, username, bots=[], projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes where at least one other non-bot user has provided review ''' super(ReportToDoListAnyones, self).__init__(client, projects, branches=branches, files=files, topics=topics, usecolor=usecolor) self.bots = bots self.username = username def filter(self, change): if change.has_current_reviewers([self.username]): return False if change.has_any_other_reviewers(self.bots): return True return False class ReportToDoListNoones(ReportToDoList): def __init__(self, client, bots=[], projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes that no one has ever reviewed ''' super(ReportToDoListNoones, self).__init__(client, projects, branches=branches, files=files, topics=topics, usecolor=usecolor) self.bots = bots def filter(self, change): if not change.has_any_other_reviewers(self.bots): return True return False class ReportToDoListApprovable(ReportToDoList): def __init__(self, client, username, strict, projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes that no one has ever 
reviewed ''' super(ReportToDoListApprovable, self).__init__(client, projects, branches=branches, files=files, topics=topics, usecolor=usecolor) self.username = username self.strict = strict def filter(self, change): if (change.has_current_approval(ModelApproval.ACTION_REVIEWED, 2) and not change.has_owner([self.username]) and not change.has_current_approval(ModelApproval.ACTION_WORKFLOW, -1) and not change.has_current_approval(ModelApproval.ACTION_WORKFLOW, 1) and not change.has_current_approval(ModelApproval.ACTION_REVIEWED, -2) and not change.has_current_reviewers([self.username])): if (self.strict and change.has_current_approval(ModelApproval.ACTION_REVIEWED, -1)): return False return True return False class ReportToDoListExpirable(ReportToDoList): def __init__(self, client, age=28, projects=[], branches=[], files=[], topics=[], usecolor=False): ''' Report to provide a list of changes that are stale and can potentially be expired ''' super(ReportToDoListExpirable, self).__init__(client, projects, branches=branches, files=files, topics=topics, usecolor=usecolor) self.age = age def filter(self, change): if change.get_current_reviewer_nack_age() > (self.age * 24 * 60 * 60): return True return False class ReportOpenReviewStats(ReportBaseChange): def __init__(self, client, projects, branch="master", topic="", days=7, usecolor=False): super(ReportOpenReviewStats, self).__init__(client, usecolor) self.projects = projects self.branch = branch self.topic = topic self.days = days @staticmethod def average_age(changes, ages): if len(changes) == 0: return 0 total = 0 for change in changes: total += ages[change] return format_delta(total / len(changes)) @staticmethod def median_age(changes, ages): if len(changes) == 0: return 0 total = 0 wantages = [] for change in changes: wantages.append(ages[change]) wantages.sort() return format_delta(wantages[int(len(wantages)/2)]) @staticmethod def older_than(changes, ages, cutoffdays): cutoff = cutoffdays * 24 * 60 * 60 older = 0 for 
change in changes: if ages[change] > cutoff: older = older + 1 return older @staticmethod def get_longest_changes(ids, changes, ages, count): want = [] for id in sorted(ids, key=lambda x: ages[x]): want.append(changes[id]) return want def generate(self): # We could query all projects at once, but if we do them # individually it means we get better hit rate against the # cache if the report is re-run for many different project # combinations agecurrent = {} agefirst = {} agenonnacked = {} wait_reviewer = [] wait_submitter = [] changes = {} for project in self.projects: query = OperationQuery(self.client, { "project": [project], "status": [OperationQuery.STATUS_OPEN], "branch": [self.branch], "topic": [self.topic], }, patches=OperationQuery.PATCHES_ALL, approvals=True) def querycb(change): if change.status != "NEW": return now = time.time() current = change.get_current_patch() first = change.get_first_patch() nonnacked = change.get_reviewer_not_nacked_patch() changes[change.id] = change if current.is_nacked(): wait_submitter.append(change.id) else: wait_reviewer.append(change.id) agecurrent[change.id] = current.get_age(now) agefirst[change.id] = first.get_age(now) if nonnacked: agenonnacked[change.id] = nonnacked.get_age(now) else: agenonnacked[change.id] = 0 query.run(querycb) compound = ReportOutputCompound() summary = ReportOutputList([ ReportOutputColumn("nreviews", "Total open reviews", format="%d", mapfunc=lambda rep, col, row: row[0] + row [1]), ReportOutputColumn("waitsubmitter", "Waiting on submitter", format="%d", mapfunc=lambda rep, col, row: row[0]), ReportOutputColumn("waitreviewer", "Waiting on reviewer", format="%d", mapfunc=lambda rep, col, row: row[1]), ], title="Review summary") summary.set_row([len(wait_submitter), len(wait_reviewer)]) compound.add_report(summary) lastrev = ReportOutputList([ ReportOutputColumn("average", "Average wait time", mapfunc=lambda rep, col, row: row[0]), ReportOutputColumn("median", "Median wait time", mapfunc=lambda rep, 
col, row: row[1]), ReportOutputColumn("stale", "Older than %d days" % self.days, format="%d", mapfunc=lambda rep, col, row: row[2]), ], title="Summary since current revision") lastrev.set_row([self.average_age(wait_reviewer, agecurrent), self.median_age(wait_reviewer, agecurrent), self.older_than(wait_reviewer, agecurrent, self.days)]) compound.add_report(lastrev) firstrev = ReportOutputList([ ReportOutputColumn("average", "Average wait time", mapfunc=lambda rep, col, row: row[0]), ReportOutputColumn("median", "Median wait time", mapfunc=lambda rep, col, row: row[1]), ], title="Summary since first revision") firstrev.set_row([self.average_age(wait_reviewer, agefirst), self.median_age(wait_reviewer, agefirst)]) compound.add_report(firstrev) nonnackedrev = ReportOutputList([ ReportOutputColumn("average", "Average wait time", mapfunc=lambda rep, col, row: row[0]), ReportOutputColumn("median", "Median wait time", mapfunc=lambda rep, col, row: row[1]), ], title="Summary since last revision without -1/-2 from reviewer") nonnackedrev.set_row([self.average_age(wait_reviewer, agenonnacked), self.median_age(wait_reviewer, agenonnacked)]) compound.add_report(nonnackedrev) def waitlastmap(rep, col, row): return format_delta(row.get_current_age()) def waitlastsort(rep, col, row): return row.get_current_age() waitlastrev = self.new_table("Longest waiting since current revision") waitlastrev.add_column(ReportOutputColumn("age", "Age", sortfunc=waitlastsort, mapfunc=waitlastmap)) waitlastrev.sortcol = "age" waitlastrev.reverse = True for change in self.get_longest_changes(wait_reviewer, changes, agecurrent, 5): waitlastrev.add_row(change) compound.add_report(waitlastrev) def waitfirstmap(rep, col, row): return format_delta(row.get_first_age()) def waitfirstsort(rep, col, row): return row.get_first_age() waitfirstrev = self.new_table("Longest waiting since first revision") waitfirstrev.add_column(ReportOutputColumn("age", "Age", sortfunc=waitfirstsort, mapfunc=waitfirstmap)) 
waitfirstrev.sortcol = "age" waitfirstrev.reverse = True for change in self.get_longest_changes(wait_reviewer, changes, agefirst, 5): waitfirstrev.add_row(change) compound.add_report(waitfirstrev) def waitnonnackedmap(rep, col, row): return format_delta(row.get_reviewer_not_nacked_age()) def waitnonnackedsort(rep, col, row): return row.get_reviewer_not_nacked_age() waitnonnackedrev = self.new_table("Longest waiting since last revision without -1/-2 from reviewer") waitnonnackedrev.add_column(ReportOutputColumn("age", "Age", sortfunc=waitnonnackedsort, mapfunc=waitnonnackedmap)) waitnonnackedrev.sortcol = "age" waitnonnackedrev.reverse = True for change in self.get_longest_changes(wait_reviewer, changes, agenonnacked, 5): waitnonnackedrev.add_row(change) compound.add_report(waitnonnackedrev) return compound
apache-2.0
1,719,606,987,100,816,600
37.750195
116
0.540989
false
4.298886
false
false
false
ebrensi/registry-frontend
ff.py
1
1240
#! usr/bin/env python # This script is for testing without having to host the flask app. import folium import pandas as pd import os from sqlalchemy import create_engine import geojson DATABASE_URL = os.environ["DATABASE_URL"] STATES_GEOJSON_PATH = "static/us-states.json" engine = create_engine(DATABASE_URL) with engine.connect() as db: query = "Select state, count(*) From registry Group By state;" df = pd.read_sql_query(query, db) with open(STATES_GEOJSON_PATH, "r") as file: gj = geojson.load(file) # Folium choropleth requires a one-to-one correspondence between GeoJSON # features (state definitions) and shade values, so we will make a new # GeoJSON object that is a FeatureCollection of only the states that we # have data for. relevant_features = [feature for feature in gj["features"] if ("id" in feature) and (feature["id"] in df["state"].values)] gj_relevant = geojson.FeatureCollection(relevant_features) geo_str = geojson.dumps(gj_relevant) base_map = folium.Map([43, -100], zoom_start=5) base_map.choropleth( geo_str=geo_str, data=df, columns=['state', 'count'], key_on='feature.id', fill_color='PuBuGn', ) base_map.save("map.html")
mit
-3,929,384,207,766,329,000
25.956522
72
0.691935
false
3.246073
false
false
false
cattleio/stampede
docs/do-demo/deploy.py
1
6809
#!/usr/bin/env python import cattle import sys ZK_NODES = 3 REDIS_NODES = 3 API_SERVER_NODES = 3 PROCESS_SERVER_NODES = 3 AGENT_SERVER_NODES = 3 MYSQL_COMPUTE = 1 # Set if you want to override the cattle.jar in the Docker image with a custom one URL = '' TAG = 'latest' client = cattle.from_env() def wait(c): return client.wait_success(c, timeout=120) deleted = [] for c in client.list_container(removed_null=True): if c.name != 'Agent': client.delete(c) print 'Deleting', c.name deleted.append(c) print 'Waiting for deleting' for c in deleted: wait(c) print 'Done' def set_link(instance, name, target): instance = wait(instance) for link in instance.instanceLinks(): if link.linkName == name: print 'Linking {} to {}'.format(instance.name, target.name) wait(client.update(link, targetInstanceId=target.id)) def deploy_zk(): # Deploying ZK is complicated.... # Create dummy ZK to link against, then we will create the circle # We want it to be stopped so that ZooKeeper doesn't actually connect print 'Creating Dummy ZK node' zk_dummy = wait(client.create_container(imageUuid='docker:ibuildthecloud/zookeeper', name='zk_dummy')) zk_dummy = wait(zk_dummy.stop()) zks = [] for i in range(1, ZK_NODES + 1): links = {} for j in range(1, ZK_NODES + 1): if j != i: links['zk{}'.format(j)] = zk_dummy.id zk = client.create_container(imageUuid='docker:ibuildthecloud/zookeeper', name='zk{}'.format(i), environment={ 'ID': i }, instanceTriggeredStop='restart', instanceLinks=links) print 'Created', zk.name zks.append(wait(zk)) for zk_target in zks: for zk in zks: set_link(zk, zk_target.name, zk_target) client.delete(zk_dummy) return zks def deploy_redis(): print 'Create Redis' redises = [] for i in range(1, REDIS_NODES + 1): redis = client.create_container(imageUuid='docker:ibuildthecloud/redis', instanceTriggeredStop='restart', name='redis{}'.format(i)) print 'Created', redis.name redises.append(redis) return redises def haproxy(targets, name, listen_port): links = {} for i, c in 
enumerate(targets): links['TARGET{}'.format(i)] = wait(c).id return client.create_container(imageUuid='docker:ibuildthecloud/haproxy', instanceLinks=links, instanceTriggeredStop='restart', name=name, ports=['{}:80'.format(listen_port)]) zookeepers = deploy_zk() redises = deploy_redis() mysql = client.create_container(imageUuid='docker:ibuildthecloud/mysql', compute=MYSQL_COMPUTE, instanceTriggeredStop='restart', ports=['9082:80'], name='mysql') print 'Created', mysql.name graphite = client.create_container(imageUuid='docker:ibuildthecloud/graphite', instanceTriggeredStop='restart', ports=['9083:80'], name='graphite') print 'Created', graphite.name es = client.create_container(imageUuid='docker:ibuildthecloud/logstash', instanceTriggeredStop='restart', ports=['9200:9200'], name='logstash/elasticache') print 'Created', es.name kibana = client.create_container(imageUuid='docker:ibuildthecloud/kibana', name='Kibana', instanceTriggeredStop='restart', ports=['9081:80'], environment={ 'ES_PORT_9200_TCP_ADDR': wait(es).hosts()[0].ipAddresses()[0].address, 'ES_PORT_9200_TCP_PORT': '9200' }) print 'Created', kibana.name print 'Create Cattle' links = { 'gelf': wait(es).id, 'graphite': wait(graphite).id } instances = [] instances.extend(zookeepers) instances.extend(redises) instances.append(mysql) for c in instances: links[c.name] = wait(c).id api_servers = [] agent_servers = [] for i in range(1, API_SERVER_NODES + 1): c = client.create_container(imageUuid='docker:cattle/api-server:{}'.format(TAG), name='API Server {}'.format(i), environment={ 'URL': URL, 'CATTLE_CATTLE_SERVER_ID': 'apiserver{}'.format(i) }, instanceTriggeredStop='restart', instanceLinks=links) print 'Created', c.name api_servers.append(c) for i in range(1, PROCESS_SERVER_NODES + 1): c = client.create_container(imageUuid='docker:cattle/process-server:{}'.format(TAG), name='Process Server {}'.format(i), environment={ 'URL': URL, 'CATTLE_JAVA_OPTS': '-Xmx1024m', 'CATTLE_CATTLE_SERVER_ID': 
'processserver{}'.format(i) }, instanceTriggeredStop='restart', instanceLinks=links) print 'Created', c.name for i in range(1, AGENT_SERVER_NODES + 1): c = client.create_container(imageUuid='docker:cattle/agent-server:{}'.format(TAG), name='Agent Server {}'.format(i), environment={ 'URL': URL, 'CATTLE_JAVA_OPTS': '-Xmx1024m', 'CATTLE_CATTLE_SERVER_ID': 'agentserver{}'.format(i) }, instanceTriggeredStop='restart', instanceLinks=links) print 'Created', c.name agent_servers.append(c) h1 = haproxy(api_servers, 'Api Servers Load Balancer', 8080) print 'Created', h1.name h2 = haproxy(agent_servers, 'Agent Servers Load Balancer', 8081) print 'Created', h2.name wait(h1) wait(h2)
apache-2.0
-6,914,724,294,481,272,000
33.21608
107
0.500661
false
4.13921
false
false
false
Aydarkhan/cca
automata.py
1
5250
"""Copyright 2010 Aydarkhanov Ruslan, Kurochkin Ilya, Rusinov Ivan This file is part of CCA. CCA is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 2 of the License, or (at your option) any later version. CCA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CCA. If not, see http://www.gnu.org/licenses/. """ from state import * class Automata(object): def __init__(self, width=150, height=70, states=None): self.width = width self.height = height if states == None: self.states = [State("Dead", '-', "white", '0', [5]), State("Alive", '+', "black", '1', [0, 1, 4, 5, 6, 7, 8])] else: self.states = states self.symbols = {} self.st_sym = {} for num, st in enumerate(self.states): self.symbols[st.symbol] = num self.st_sym[st.symbol] = st self.field = [] for row in range(height): self.field.append([]) for col in range(width): self.field[row].append(self.states[0].symbol) def next_step(self): changed = [] for row in range(1, self.height - 1): for col in range(1, self.width - 1): symbol = self.field[row][col] num = 0 for vert in range(row - 1, row + 2): for horiz in range(col - 1, col + 2): if self.field[vert][horiz] == symbol: num += 1 if self.st_sym[symbol].next_state(num - 1): changed.append((row, col)) for row in range(1, self.height - 1): symbol1 = self.field[row][0] symbol2 = self.field[row][self.width - 1] num1 = 0 num2 = 0 for vert in range(row - 1, row + 2): for horiz in [0, 1, self.width - 1]: if self.field[vert][horiz] == symbol1: num1 += 1 for horiz in [self.width - 2, self.width - 1, 0]: if self.field[vert][horiz] == symbol2: num2 += 1 if self.st_sym[symbol1].next_state(num1 - 1): changed.append((row, 0)) 
if self.st_sym[symbol2].next_state(num2 - 1): changed.append((row, self.width - 1)) for col in range(1, self.width - 1): symbol1 = self.field[0][col] symbol2 = self.field[self.height - 1][col] num1 = 0 num2 = 0 for horiz in range(col - 1, col + 2): for vert in [0, 1, self.height - 1]: if self.field[vert][horiz] == symbol1: num1 += 1 for vert in [self.height - 2, self.height - 1, 0]: if self.field[vert][horiz] == symbol2: num2 += 1 if self.st_sym[symbol1].next_state(num1 - 1): changed.append((0, col)) if self.st_sym[symbol2].next_state(num2 - 1): changed.append((self.height - 1, col)) for row, col in [(0, 0), (self.height - 1, self.width - 1), (0, self.width - 1), (self.height - 1, 0)]: symbol = self.field[row][col] num = 0 for vert_long in range(row + self.height - 1, row + self.height + 2): for horiz_long in range(col + self.width - 1, col + self.width + 2): vert = vert_long % self.height horiz = horiz_long % self.width if self.field[vert][horiz] == symbol: num += 1 if self.st_sym[symbol].next_state(num - 1): changed.append((row, col)) for row, col in changed: index = (self.symbols[self.field[row][col]] + 1) % len(self.states) self.field[row][col] = self.states[index].symbol return changed def change_size(self, value, side): "0-up, 1-right, 2-down, 3-left" new_field = [] if side == 0: self.height += value for row in range(value): new_field.append([]) for col in range(self.width): new_field[row].append(self.states[0].symbol) init = value if value < 0: init = 0 for row in range(init, self.height): new_field.append([]) for col in range(self.width): new_field[row].append(self.field[row - value][col]) if side == 2: self.height += value term = value if value < 0: term = 0 for row in range(self.height - term): new_field.append([]) for col in range(self.width): new_field[row].append(self.field[row][col]) for row in range(self.height - term, self.height): new_field.append([]) for col in range(self.width): new_field[row].append(self.states[0].symbol) if side == 1: 
self.width += value term = value if value < 0: term = 0 for row in range(self.height): new_field.append([]) for col in range(self.width - term): new_field[row].append(self.field[row][col]) for row in range(self.height): for col in range(self.width - term, self.width): new_field[row].append(self.states[0].symbol) if side == 3: self.width += value for row in range(self.height): new_field.append([]) for col in range(value): new_field[row].append(self.states[0].symbol) init = value if value < 0: init = 0 for row in range(self.height): for col in range(init, self.width): new_field[row].append(self.field[row][col - value]) self.field = new_field
gpl-2.0
5,469,230,736,711,367,000
30.25
68
0.60781
false
2.924791
false
false
false
nonamenix/yandex-vesna-generator
yandex_vesna_generator/vesna.py
1
2537
# -*- coding: utf-8 -*- from lxml import etree from slugify import slugify class Entry(object): def __init__(self, title="", paragraphs=[], themes=[], **kwargs): self.title = title self.paragraphs = paragraphs self.themes = themes self.header_wrapper = kwargs.get("header_wrapper", "h2") self.paragraph_wrapper = kwargs.get("paragraph_wrapper", "p") self.slug = slugify(title, to_lower=True) self.description = self.paragraphs[0][0:kwargs.get("description_length", 220)] def render_html(self): html = self.header html += self.body return html @property def header(self): return "<%(wrapper)s>%(title)s</%(wrapper)s> \n" % { 'title': self.title, 'wrapper': self.header_wrapper } @property def body(self): return "".join(["<%(wrapper)s>%(text)s</$(wrapper)s> \n" % { "text": p, "wrapper": self.paragraph_wrapper } for p in self.paragraphs]) def __repr__(self): return '<Entry theme="%s" id="%s">' % (", ".join(self.themes), hex(id(self))) def __getitem__(self, field): return self.__dict__[field] class VesnaGenerator(object): """ Class for generate crazy text on your site """ # Themes AVAILABLE_THEMES = [ 'astronomy', 'geology', 'gyroscope', 'literature', 'marketing', 'mathematics', 'music', 'polit', 'agrobiologia', 'law', 'psychology', 'geography', 'physics', 'philosophy', 'chemistry'] def __init__(self, themes=[], entry_options={}): self.themes = [theme for theme in themes if theme in self.AVAILABLE_THEMES] or self.AVAILABLE_THEMES self.entry_options = entry_options # Generate yandex vesna url self.base_url = "http://referats.yandex.ru/referats/" self.url = self.base_url + "?t=" + "+".join(self.themes) self.entries = [] def generate_entry(self): self.parser = etree.HTMLParser(recover=True) self.doc = etree.parse(self.url, self.parser) title = self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/strong')[0].text title = title.encode('utf-8').replace('Тема: «', '').replace('»', '').decode('utf-8') paragraps = 
self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/p') return Entry( title=title, paragraphs=[p.text for p in paragraps], themes=self.themes, **self.entry_options )
apache-2.0
-2,670,042,156,606,405,000
33.216216
108
0.575267
false
3.476648
false
false
false
razorpay/razorpay-python
tests/test_client_utility.py
1
1858
import responses from .helpers import mock_file, ClientTestCase from razorpay.errors import SignatureVerificationError class TestClientValidator(ClientTestCase): def setUp(self): super(TestClientValidator, self).setUp() @responses.activate def test_verify_payment_signature(self): sig = 'b2335e3b0801106b84a7faff035df56ecffde06918c9ddd1f0fafbb37a51cc89' parameters = {} parameters['razorpay_order_id'] = 'fake_order_id' parameters['razorpay_payment_id'] = 'fake_payment_id' parameters['razorpay_signature'] = sig self.assertEqual( self.client.utility.verify_payment_signature(parameters), True) @responses.activate def test_verify_payment_signature_with_exception(self): parameters = {} parameters['razorpay_order_id'] = 'fake_order_id' parameters['razorpay_payment_id'] = 'fake_payment_id' parameters['razorpay_signature'] = 'test_signature' self.assertRaises( SignatureVerificationError, self.client.utility.verify_payment_signature, parameters) @responses.activate def test_verify_webhook_signature(self): secret = self.client.auth[1] sig = 'd60e67fd884556c045e9be7dad57903e33efc7172c17c6e3ef77db42d2b366e9' body = mock_file('fake_payment_authorized_webhook') self.assertEqual( self.client.utility.verify_webhook_signature(body, sig, secret), True) @responses.activate def test_verify_webhook_signature_with_exception(self): secret = self.client.auth[1] sig = 'test_signature' body = '' self.assertRaises( SignatureVerificationError, self.client.utility.verify_webhook_signature, body, sig, secret)
mit
4,018,015,657,456,469,500
31.596491
80
0.653929
false
3.799591
true
false
false
nosuchtim/VizBench
src/PyLoopyCam/testit.py
1
5268
""" Copyright (c) 2015, Tim Thompson All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Tim Thompson, nosuch.com, nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import sys import time import traceback import thread import threading import copy import asyncore import asynchat import socket import sys import re import xml.dom.minidom as xmldom import string import pygame.pypm import os.path import os, pygame import pickle import random from os.path import isdir, isfile, isabs, abspath from urllib import quote, unquote from threading import * from ctypes import * from time import sleep from Queue import Queue, Empty from xml.sax import saxutils from xml.dom import Node from traceback import format_exc from dircache import listdir from pygame.locals import * from thread import * from ffff import * global debug debug = False global debugosc debugosc = False global debugosc2 debugosc2 = False class NthEventServer(Thread): """ Provides an event stream that can serve multiple listeners track of what fingers are currently down, smoothing drag motion, etc. """ oneServer = None def __init__(self): Thread.__init__(self) self.setDaemon(True) NthEventServer.oneServer = self print "NthEventServer.oneServer = ", NthEventServer.oneServer self.dispenser = PushedEventDispenser() self.throttle = 0.005 self.throttle = 0.0 self.inputs = {} self.outputs = {} self.cv = threading.Condition() self.events = {} self.firstevent = 0 self.nextevent = 0 self.osc_recipients = {"music":[], "graphic":[]} self.start() self.too_old_seconds = 30.0 self.event_inputs = {} self.forward_inputs = {} self.forward_finger = None self.tm0 = time.time() self.osc_count = 0 def send_osc(self, o, apptype): (msg_addr, msg_data) = o if msg_addr == "": print "No msg_addr value in send_osc?" 
return now = time.time() self.osc_count += 1 if now - self.tm0 > 1.0: print "OSC Per second = ", self.osc_count self.osc_count = 0 self.tm0 = now msg_addr = str(msg_addr) b = createBinaryMsg(msg_addr, msg_data) # print "createBinary msg_addr=",msg_addr," msg_data=",msg_data print("SHOULD BE sending %s OSC=%s" % (apptype, o.__str__())) # r.osc_socket.sendto(b, (r.osc_addr, r.osc_port)) def main(): debug = True httpaddr = "127.0.0.1" httpport = 7777 rootdir = None print "SYS.ARGV len=", len(sys.argv) argn = len(sys.argv) if len(sys.argv) == 1: print "NO arguments..." else: argn = 1 if sys.argv[argn] == "-d": debug = True print "Debug is True" argn += 1 else: debug = False argn += 1 for i in range(argn, len (sys.argv)): a = sys.argv[i] print("a = ", a) if a.startswith("rootdir:"): rootdir = abspath(a[8:]) elif a.startswith("httpaddr:"): httpaddr = a[9:] elif a.startswith("httpport:"): httpport = int(a[9:]) try: import os position = (-800, 0) position = (600, 360) os.environ['SDL_VIDEO_WINDOW_POS'] = str(position[0]) + "," + str(position[1]) pygame.init() width = 250 height = 500 flags = pygame.SRCALPHA # from panel import NthControlPanel # ui = NthControlPanel(width, height, flags) # time.sleep(1.0) # pygame.event.set_grab(True) try: ffff = Ffff("localhost",80) except: print "EXCEPT caught in creating Ffff! Exception=", format_exc() plugin = ffff.get_ffgl("Twisted") param = plugin.get_param("Twirl") # ffff.set_all_params(plugin,1.0) for nm in plugin.param: p = plugin.param[nm] val = random.random() % 1.0 ffff.change_plugin_param_val(plugin,p,val) except KeyboardInterrupt: print("KeyboardInterrupt received...\n"); # server.shutdown_quick() except: s = format_exc() if not re.search(".*shutdown_quick.*", s): print("Exception while running myserver?\n"); print(s) # server.shutdown_quick() if __name__ == '__main__': main() # import cProfile # cProfile.run('main()')
mit
7,051,323,283,669,740,000
24.205742
80
0.705201
false
3.26192
false
false
false
juntalis/aio-pika
docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/receive_logs.py
1
1064
import asyncio from aio_pika import connect, IncomingMessage, ExchangeType loop = asyncio.get_event_loop() def on_message(message: IncomingMessage): with message.process(): print("[x] %r" % message.body) async def main(): # Perform connection connection = await connect("amqp://guest:guest@localhost/", loop=loop) # Creating a channel channel = await connection.channel() await channel.set_qos(prefetch_count=1) logs_exchange = await channel.declare_exchange( 'logs', ExchangeType.FANOUT ) # Declaring queue queue = await channel.declare_queue(exclusive=True) # Binding the queue to the exchange await queue.bind(logs_exchange) # Start listening the queue with name 'task_queue' queue.consume(on_message) if __name__ == "__main__": loop = asyncio.get_event_loop() loop.create_task(main()) # we enter a never-ending loop that waits for data and runs callbacks whenever necessary. print(' [*] Waiting for logs. To exit press CTRL+C') loop.run_forever()
apache-2.0
1,176,001,167,043,564,000
24.95122
93
0.675752
false
3.8
false
false
false
griddynamics/bunch
lettuce_bunch/dependencies.py
1
2875
# -*- coding: utf-8 -*- # <Bunch - BDD test tool for Lettuce scenarios> # Copyright (c) 2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved # http://www.griddynamics.com # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from exceptions import CyclicDependencySpecification from topsort import topsort_levels,CycleError from itertools import chain, tee, izip, product def pairwise(iterable): a, b = tee(iterable) next(b) return izip(a, b) def dependency_lists_to_pairs(dependency_lists): return chain(*(pairwise(dep_list) for dep_list in dependency_lists)) def dependency_groups_to_pairs(groups): return chain(*(product(a,b) for a,b in pairwise(groups))) def split_solitaries(deps): solitaries = [] linked = [] for dep in deps: if len(dep) == 1 and len(dep[0]) > 0: solitaries.append(dep[0]) else: linked.append(dep) return solitaries, linked def filter_empties(deps): return filter(None, deps) def combine_fixture_deps(deps): solitaries, linked = split_solitaries(filter_empties(deps)) try: result = [sorted(group) for group in topsort_levels(chain(*map(dependency_groups_to_pairs, linked)))] for solitary in solitaries: if solitary not in result: result.append(solitary) except CycleError as cycle_details: raise CyclicDependencySpecification(cycle_details) return result
gpl-3.0
8,455,682,489,359,907,000
39.666667
109
0.712348
false
3.869448
false
false
false
tonyshardlow/reg_sde
run_pf.py
1
1560
from __future__ import (absolute_import, division, print_function, unicode_literals) exec(open("ground.py").read()) # mine import hamiltonian import diffeo import sde from utility import * # # # all data defined in utility (exp2,...) # def run(dict): import os.path if 'fname' in dict: filename=dict['fname'] else: print("No filename given") exit(1) print("filename: ",filename+dict['ext']) # G=hamiltonian.GaussGreen(dict['ell'],0) no_steps=dict['no_steps'] # SDE = sde.SDE(G) SDE.set_no_steps(no_steps) SDE.set_landmarks(dict['landmarks_n']) SDE.set_lam_beta(dict['lam'],dict['beta'],True) # plot a push-forward sample (with current shape) plot_setup() plt.axis('equal') plt.axis('off') Q0=dict['landmarks'][0,:,:] D=SDE.sample_push_forward(Q0) D.plot_qpath_01(0) D.plot_warped_grid(10) plt.savefig(filename+dict['ext']+'.pdf',bbox_inches='tight') print("...finished.") # #################################################################### if __name__ == "__main__": # do this plt.ion() noise_var=0.2 dict=exp1(noise_var) #dict=exp2(noise_var) #dict=exp4(noise_var) # dict=exp4(noise_var) dict['lam']=0.5 scale=1.0e1;betas=np.array([1., 2., 4.0, 8.])*scale exts=['a_pf', 'b_pf', 'c_pf', 'd_pf'] for i in range(4): print("=======") dict['beta']=betas[i] dict['ext']=exts[i] run(dict)
mit
5,528,959,925,701,617,000
25.857143
68
0.523077
false
3.035019
false
false
false
xbed/Mixly_Arduino
mixly_arduino/mpBuild/ESP32_MixGo/lib/mixgo.py
1
5214
from machine import Pin from machine import PWM from machine import ADC from machine import DAC from machine import I2C from machine import Timer from machine import RTC from machine import TouchPad import time from neopixel import NeoPixel def get_brightness(pin = 39): return ADCSensor(pin).read() def get_soundlevel(pin = 35): return ADCSensor(pin).read() # Button class Button: def __init__(self, pin): from machine import Pin self.pin = Pin(pin, Pin.IN) def get_presses(self, delay = 1): last_time, last_state, presses = time.time(), 0, 0 while time.time() < last_time + delay: time.sleep_ms(50) if last_state == 0 and self.pin.value() == 1: last_state = 1 if last_state == 1 and self.pin.value() == 0: last_state, presses = 0, presses + 1 return presses def is_pressed(self, flag = 0): return self.pin.value() == flag def was_pressed(self, flag = 0): last_state = self.pin.value() if flag: if not last_state: return False else: while self.pin.value(): time.sleep_ms(10) return True else: if last_state: return False else: while not self.pin.value(): time.sleep_ms(10) return True def irq(self, handler, trigger): self.pin.irq(handler = handler, trigger = trigger) # Pin class MyPin(Pin): def write_digital(self,val): self.init(Pin.OUT) self.value(val) def read_digital(self): self.init(Pin.IN) return self.value() def write_analog(self,val): id = int(str(self)[4:-1]) #unsafe! self = PWM(Pin(id),duty=val) def dac_write(self,val): id = int(str(self)[4:-1]) #unsafe! self = DAC(Pin(id)).write(val) def read_analog(self): id = int(str(self)[4:-1]) #unsafe! self = ADC(Pin(id)) return self.read() def set_frequency(self,val): id = int(str(self)[4:-1]) self = PWM(Pin(id),freq=val) def is_touched(self): id = int(str(self)[4:-1]) #unsafe! if id in (0,2,4,12,13,14,15,27,32,33): # print(TouchPad(Pin(id)).read()) return (TouchPad(Pin(id)).read() - 150 < 0) else: self.init(Pin.IN) return self.value() == 1 class Infrared(MyPin): def near(self): id = int(str(self)[4:-1]) #unsafe! 
pin15=Pin(15,Pin.OUT) pin15.value(1) adc=ADC(Pin(id)) adc.atten(ADC.ATTN_11DB) approximate =adc.read() pin15.value(0) return approximate # Servo class Servo: def __init__(self,pin): self.pin=pin def write_angle(self,angle): id = int(str(self.pin)[4:-1]) PWM(Pin(id),freq=50,duty=int(40 + 75 * angle / 180)) # Sonar class Sonar: def __init__(self, trig, echo): self.trig=Pin(trig, Pin.OUT) self.echo=Pin(echo, Pin.IN) def checkdist(self): self.trig.value(0) self.echo.value(0) self.trig.value(1) time.sleep_us(10) self.trig.value(0) while(self.echo.value()==0): pass t1 = time.ticks_us() while(self.echo.value()==1): pass t2 = time.ticks_us() return round(time.ticks_diff(t2, t1) / 10000 * 340 / 2, 2) class led: def __init__(self, pin, flag=1): self.val = flag self.pin = pin self.flag = flag def setbrightness(self,val): self.val = val if self.flag: PWM(Pin(self.pin)).duty(self.val) else: PWM(Pin(self.pin)).duty(1023 - self.val) def setonoff(self,val): if(val == -1): Pin(self.pin,Pin.OUT).value(1 - Pin(self.pin).value()) elif(val == 1): Pin(self.pin,Pin.OUT).value(self.flag) elif(val == 0): Pin(self.pin,Pin.OUT).value(1 - self.flag) def getonoff(self): if self.flag: return Pin(self.pin).value() else: return 1 - Pin(self.pin).value() class ADCSensor: def __init__(self,pin): self.adc=ADC(Pin(pin)) self.adc.atten(ADC.ATTN_11DB) def read(self): return self.adc.read() class RGB: def __init__(self, pin, num): self = NeoPixel(Pin(pin), num) def write(self,n,r,g,b): self[n] = (r, g, b) self.write() i2c = I2C(scl = Pin(22), sda = Pin(21), freq = 100000) buf = bytearray(1) rtc = RTC() tim = Timer(-1) try: i2c.readfrom_mem_into(0x68, 0X75, buf) except: pass else: if buf[0] == 0x71: from mpu9250 import * mpu = MPU9250(i2c) compass = Compass(mpu) button_a = Button(17) button_b = Button(16) led1 = led(pin = 0, flag = 0) led2 = led(pin = 5, flag = 0) infrared_left = Infrared(34) infrared_right = Infrared(36) touch1 = MyPin(32) touch2 = MyPin(33) touch3 = MyPin(25) touch4 = MyPin(26) rgb = 
NeoPixel(Pin(2), 2)
apache-2.0
-1,362,624,201,661,192,700
25.472081
69
0.528002
false
3.150453
false
false
false
tea321000/django-project
musicsite/music/migrations/0002_auto_20170305_2121.py
1
1364
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-03-05 13:21 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('music', '0001_initial'), ] operations = [ migrations.AddField( model_name='music', name='singer', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='Musician_singer', to='music.Musician'), ), migrations.AlterField( model_name='musician', name='birthday', field=models.DateTimeField(verbose_name='\u51fa\u751f\u65e5\u671f'), ), migrations.AlterField( model_name='musician', name='name', field=models.CharField(max_length=40, verbose_name='\u539f\u540d'), ), migrations.AlterField( model_name='musician', name='sex', field=models.CharField(choices=[('M', '\u7537'), ('F', '\u5973')], max_length=1, verbose_name='\u6027\u522b'), ), migrations.AlterField( model_name='musician', name='stagename', field=models.CharField(blank=True, max_length=40, null=True, verbose_name='\u827a\u540d'), ), ]
mit
-5,270,324,763,746,811,000
32.268293
145
0.579179
false
3.706522
false
false
false
mRokita/DPLib
dplib/server.py
1
47676
# DPLib - Asynchronous bot framework for Digital Paint: Paintball 2 servers # Copyright (C) 2017 Michał Rokita # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import re import select from collections import OrderedDict from enum import Enum from subprocess import Popen import asyncio import os from socket import socket, AF_INET, SOCK_DGRAM from time import time from dplib.parse import render_text, decode_ingame_text class ServerEvent(Enum): TIMEOUT = 0 CHAT = 1 ELIM = 2 RESPAWN = 3 MAPCHANGE = 4 DATE = 5 NAMECHANGE = 6 ENTRANCE = 7 FLAG_CAPTURED = 8 ELIM_TEAMS_FLAG = 9 ROUND_STARTED = 10 TEAM_SWITCHED = 11 DISCONNECT = 12 FLAG_GRAB = 13 FLAG_DROP = 14 ROUND_END = 15 GAMEMODE = 16 GAME_END = 17 class GameMode(Enum): CTF = 'CTF' ONE_FLAG = '1Flag' ELIMINATION = 'Elim' DEATHMATCH = 'DM' SIEGE = 'Siege' TDM = 'TDM' KOTH = 'KOTH' PONG = 'Pong' class BadRconPasswordError(Exception): pass class SecurityCheckError(Exception): pass class MapNotFoundError(Exception): pass class ListenerType(Enum): PERMANENT = 0 TRIGGER_ONCE = 1 REGEXPS = OrderedDict([ (re.compile('^\\[\d\d:\d\d:\d\d\\] (?:(?:\\[OBS\\] )|(?:\\[ELIM\\] ))?(.*?): (.*?)\r?\n'), ServerEvent.CHAT), # [19:54:18] hTml: test (re.compile( '^\\[\d\d:\d\d:\d\d\\] \\*(.*?) (?:\\((.*?)\\) eliminated \\*(.*?) 
\\((.*?)\\)\\.\r?\n|' 'eliminated ((?:himself)|(?:herself)) with a paintgren\\.\r?\n)'), ServerEvent.ELIM), # [18:54:24] *|ACEBot_1| (Spyder SE) eliminated *|herself| (Spyder SE). # [12:25:44] *whoa eliminated herself with a paintgren. # [12:26:09] *whoa eliminated himself with a paintgren. (re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) revived!\r?\n'), ServerEvent.RESPAWN), # [19:03:57] *Red's ACEBot_6 revived! (re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) entered the game \\((.*?)\\) \\[(.*?)\\]\r?\n'), ServerEvent.ENTRANCE), # [19:03:57] mRokita entered the game (build 41) (re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) returned the(?: \\*(.*?))? flag!\r?\n'), ServerEvent.FLAG_CAPTURED), # [18:54:24] *Red's hTml returned the *Blue flag! (re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) earned (\d+) points for possesion of eliminated teams flag!\r?\n'), ServerEvent.ELIM_TEAMS_FLAG), # [19:30:23] *Blue's mRokita earned 3 points for possesion of eliminated teams flag! (re.compile('^\\[\d\d:\d\d:\d\d\\] Round started\\.\\.\\.\r?\n'), ServerEvent.ROUND_STARTED), # [10:20:11] Round started... (re.compile( '(?:^\\[\d\d:\d\d:\d\d\\] (.*?) switched from \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))' ' to \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))\\.\r?\n)|' '(?:^\\[\d\d:\d\d:\d\d\\] (.*?) joined the \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow)) team\\.\r?\n)|' '(?:^\\[\d\d:\d\d:\d\d\\] (.*?) is now (observing)?\\.\r?\n)'), ServerEvent.TEAM_SWITCHED), # [10:20:11] mRokita switched from Blue to Red. # [10:20:11] mRokita is now observing. # [10:20:11] mRokita is now observing. (re.compile('^\\[\d\d:\d\d:\d\d\\] [\t|-]{2}GameEnd[\t-](.*?)\r?\n'), ServerEvent.GAME_END), # [22:40:33] GameEnd 441.9 No winner # [22:40:33] GameEnd 1032.6 Red:23,Blue:22 # [22:40:33] GameEnd 4.9 DPBot01 wins! 
# [22:40:33] GameEnd 42.9 Yellow:5,Blue:0,Purple:0,Red:0 # [22:40:33] GameEnd 42.9 Yellow:5,Blue:12,Purple:7 (re.compile('^\\[\d\d:\d\d:\d\d\\] == Map Loaded: (.+) ==\r?\n'), ServerEvent.MAPCHANGE), # [10:20:11] == Map Loaded: airtime == (re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) changed name to (.*?)\\.\r?\n'), ServerEvent.NAMECHANGE), # [19:54:54] name1 changed name to name2. (re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) disconnected\\.\r?\n'), ServerEvent.DISCONNECT), # [19:03:57] whoa disconnected. (re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) got the(?: \\*(.*?))? flag\\!\r?\n'), ServerEvent.FLAG_GRAB), # [19:03:57] *whoa got the *Red flag! (re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) dropped the flag\\!\r?\n'), ServerEvent.FLAG_DROP), # [19:03:57] *whoa dropped the flag! (re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) team wins the round\\!\r?\n'), ServerEvent.ROUND_END), # [14:38:50] Blue team wins the round! (re.compile('^\\[\d\d:\d\d:\d\d\\] === ((?:Deathmatch)|(?:Team Flag CTF)|(?:Single Flag CTF)|(?:Team Siege)|(?:Team Elim)|(?:Team Siege)|(?:Team Deathmatch)|(?:Team KOTH)|(?:Pong)) ===\r?\n'), ServerEvent.GAMEMODE), # [09:58:11] === Team Flag CTF === # [13:16:19] === Team Siege === # [21:53:54] === Pong === # [12:21:05] === Deathmatch === ]) class Player(object): """ Player info from sv players command :Attributes: * dplogin - dplogin.com account id, None when Player has no account * nick - nickname: * build - game build * server - an instance of :class:`Server` """ def __init__(self, server, id, dplogin, nick, build): self.server = server self.id = id self.dplogin = dplogin self.nick = nick self.build = build class Server(object): """ Represents a DP:PB2 server :param hostname: Server hostname, for example '127.0.0.1' :type hostname: str :param port: Server port, default 27910 :type port: int :param logfile: Path to logfile :param rcon_password: rcon password :param pty_master: Master of the dp2 process (useful only if you want to run the server from your Python 
script). Go to the getting started section for details. :type pty_master: int :param init_vars: Send come commands used for security """ def __init__(self, hostname, port=27910, logfile=None, rcon_password=None, pty_master=None, init_vars=True): self.__rcon_password = rcon_password self.__hostname = hostname self.__init_vars = init_vars self.__port = port self.__log_file = None self.__is_secure = False self.__alive = False self.__logfile_name = logfile if not pty_master else None self.__pty_master = pty_master self.handlers = { ServerEvent.CHAT: 'on_chat', ServerEvent.ELIM: 'on_elim', ServerEvent.RESPAWN: 'on_respawn', ServerEvent.ENTRANCE: 'on_entrance', ServerEvent.FLAG_CAPTURED: 'on_flag_captured', ServerEvent.ELIM_TEAMS_FLAG: 'on_elim_teams_flag', ServerEvent.ROUND_STARTED: 'on_round_started', ServerEvent.TEAM_SWITCHED: 'on_team_switched', ServerEvent.GAME_END: 'on_game_end', ServerEvent.MAPCHANGE: 'on_mapchange', ServerEvent.NAMECHANGE: 'on_namechange', ServerEvent.DISCONNECT: 'on_disconnect', ServerEvent.FLAG_GRAB: 'on_flag_grab', ServerEvent.FLAG_DROP: 'on_flag_drop', ServerEvent.ROUND_END: 'on_round_end', ServerEvent.GAMEMODE: 'gamemode', } self.__listeners = { ServerEvent.CHAT: [], ServerEvent.ELIM: [], ServerEvent.RESPAWN: [], ServerEvent.ENTRANCE: [], ServerEvent.FLAG_CAPTURED: [], ServerEvent.ELIM_TEAMS_FLAG: [], ServerEvent.ROUND_STARTED: [], ServerEvent.TEAM_SWITCHED: [], ServerEvent.GAME_END: [], ServerEvent.MAPCHANGE: [], ServerEvent.NAMECHANGE: [], ServerEvent.DISCONNECT: [], ServerEvent.FLAG_GRAB: [], ServerEvent.FLAG_DROP: [], ServerEvent.ROUND_END: [], ServerEvent.GAMEMODE: [], } self.loop = asyncio.get_event_loop() def is_listening(self): """ Check if the main loop is running. :rtype: bool """ return self.__alive @asyncio.coroutine def on_chat(self, nick, message): """ On chat, can be overridden using the :func:`.Server.event` decorator. :param nick: Player's nick. :type nick: str :param message: Message. 
:type message: str """ pass @asyncio.coroutine def on_flag_captured(self, team, nick, flag): """ On flag captured, can be overridden using the :func:`.Server.event` decorator. :param team: Player's team. :type team: str :param nick: Player's nick. :type nick: str :param flag: Captured flag (Blue|Red|Yellow|Purple|White) :type flag: str """ pass @asyncio.coroutine def on_team_switched(self, nick, old_team, new_team): """ On team switched, can be overridden using the :func:`.Server.event` decorator. :param nick: Player's nick :type nick: str :param old_team: Old team (Blue|Red|Yellow|Purple|Observer) :type old_team: str :param new_team: New team (Blue|Red|Yellow|Purple|Observer) :type new_team: str """ pass @asyncio.coroutine def on_round_started(self): """ On round started, can be overridden using the :func:`.Server.event` decorator. """ pass @asyncio.coroutine def on_elim_teams_flag(self, team, nick, points): """ On scored points for possession of eliminated teams flag, can be overridden using the :func:`.Server.event` decorator. :param team: Player's team. :type team: str :param nick: Player's nick. :type nick: str :param points: Points earned. :type points: int """ pass @asyncio.coroutine def on_entrance(self, nick, build, addr): """ On entrance, can be overriden using the :func:`.Server.event` decorator. :param nick: Player's nick :type nick: str :param build: Player's game version ('build 41' for example :type build: str :param addr: Player's address, IP:PORT ('127.0.0.1:23414' for example) :type addr: str """ pass @asyncio.coroutine def on_game_end(self, score_blue, score_red, score_yellow, score_purple): """ On game end, can be overriden using the :func:`.Server.event` decorator. :param score_blue: Blue's score - None if there was no Blue team. :param score_red: Red's score - None if there was no Red team. :param score_yellow: Yellow's score - None if there was no Yellow team. :param score_purple: Purple's score - None if there was no Purple team. 
""" pass @asyncio.coroutine def on_elim(self, killer_nick, killer_weapon, victim_nick, victim_weapon, suicide): """ On elim can be overridden using the :func:`.Server.event` decorator. :param killer_nick: Killer's nick :type killer_nick: str :param killer_weapon: Killer's weapon :type killer_weapon: str :param victim_nick: Victim's nick :type victim_nick: str :param victim_weapon: Victim's weapon :type victim_weapon: str """ pass @asyncio.coroutine def on_respawn(self, team, nick): """ On respawn, can be overridden using the :func:`.Server.event` decorator. :param team: Player's team (Blue|Red|Yellow|Purple) :type team: str :param nick: Player's nick :type nick: str """ pass @asyncio.coroutine def on_mapchange(self, mapname): """ On mapcange, can be overridden using the :func:`.Server.event` decorator. :param mapname: Mapname :type mapname: str """ pass @asyncio.coroutine def on_namechange(self, old_nick, new_nick): """ On name change, can be overridden using the :func:`.Server.event` decorator. :param old_nick: Old nick :type old_nick: str :param new_nick: Old nick :type new_nick: str """ pass @asyncio.coroutine def on_disconnect(self, nick): """ On disconnect, can be overridden using the :func:`.Server.event`decorator. :param nick: Disconnected player's nick :type nick: str """ pass @asyncio.coroutine def on_flag_grab(self, nick, flag): """ On flag grab, can be overridden using the :func:`.Server.event` decorator. :param nick: Player's nick :type nick: str :param team: Flag color (Blue|Red|Yellow|Purple) :type team: str """ pass @asyncio.coroutine def on_flag_drop(self, nick): """ On flag grab, can be overridden using the :func:`.Server.event` decorator. :param nick: Player's nick :type nick: str :param team: Flag color (Blue|Red|Yellow|Purple) :type team: str """ pass @asyncio.coroutine def on_round_end(self): """ Onround end, can be overridden using the :func:`.Server.event` decorator. 
""" pass @asyncio.coroutine def gamemode(self, gamemode): """ Onround end, can be overridden using the :func:`.Server.event` decorator. :param gamemode: map's gamemode :type gamemode: str """ pass def event(self, func): """ Decorator, used for event registration. :param func: function to register :rtype: builtin_function_or_method :example: .. code-block:: python :linenos: >>> from dplib.server import Server >>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello') >>> @s.event ... def on_chat(nick, message): ... print((nick, message)) ... >>> s.run() ('mRokita', 'Hi') """ if func.__name__ in self.handlers.values(): setattr(self, func.__name__, asyncio.coroutine(func)) return func else: raise Exception('Event \'%s\' doesn\'t exist' % func.__name__) def stop_listening(self): """ Stop the main loop """ self.__alive = False def __perform_listeners(self, event_type, args, kwargs): """ Performs all pending listeners. :param event_type: Event type, one of members :class:`ServerEvent` :param args: Event info :type args: tuple :param kwargs: Event info :type kwargs: dict """ to_remove = list() for i, (check, future) in enumerate(self.__listeners[event_type]): if not future.cancelled() and not future.done(): if check(*args): future.set_result(kwargs) else: to_remove.append(i) for i in reversed(to_remove): self.__listeners[event_type].pop(i) def nicks_valid(self, *nicks): nicks_ingame = [p.nick for p in self.get_players()] for nick in nicks: if nick not in nicks_ingame: return False return True @asyncio.coroutine def __handle_event(self, event_type, args): """ Handles an event. 
:param event_type: Event type, one of members :class:`ServerEvent` :param args: Event info (re.findall() results) """ kwargs = dict() if event_type == ServerEvent.CHAT: if args[0] not in [p.nick for p in self.get_players()]: return kwargs = { 'nick': args[0], 'message': args[1], } self.__perform_listeners(ServerEvent.CHAT, args, kwargs) elif event_type == ServerEvent.ELIM: kwargs = { 'killer_nick': args[0], 'killer_weapon': args[1], 'victim_nick': args[2], 'victim_weapon': args[3], 'suicide': args[4], } self.__perform_listeners(ServerEvent.ELIM, args, kwargs) elif event_type == ServerEvent.RESPAWN: kwargs = { 'team': args[0], 'nick': args[1], } self.__perform_listeners(ServerEvent.RESPAWN, args, kwargs) elif event_type == ServerEvent.ENTRANCE: kwargs = { 'nick': args[0], 'build': args[1], 'addr': args[2], } self.__perform_listeners(ServerEvent.ENTRANCE, args, kwargs) elif event_type == ServerEvent.FLAG_CAPTURED: kwargs = { 'team': args[0], 'nick': args[1], 'flag': args[2], } self.__perform_listeners(ServerEvent.FLAG_CAPTURED, args, kwargs) elif event_type == ServerEvent.ELIM_TEAMS_FLAG: kwargs = { 'team': args[0], 'nick': args[1], 'points': int(args[2]), } self.__perform_listeners(ServerEvent.ELIM_TEAMS_FLAG, args, kwargs) elif event_type == ServerEvent.ROUND_STARTED: kwargs = dict() self.__perform_listeners(ServerEvent.ROUND_STARTED, args, kwargs) elif event_type == ServerEvent.TEAM_SWITCHED: new_args = tuple([arg for arg in args if arg]) kwargs = { 'nick': new_args[0], 'old_team': new_args[1] if len(new_args) > 2 else 'Observer', 'new_team': new_args[2] if len(new_args) > 2 else new_args[1] } if kwargs['new_team'] == 'observing': kwargs['new_team'] = 'Observer' kwargs['old_team'] = None self.__perform_listeners(ServerEvent.TEAM_SWITCHED, new_args, kwargs) elif event_type == ServerEvent.GAME_END: kwargs = { 'score_blue': None, 'score_red': None, 'score_purple': None, 'score_yellow': None, } teams = args.split(',') for t in teams: data = t.split(':') if data[0] == 
'Blue': kwargs['score_blue'] = data[1] elif data[0] == 'Red': kwargs['score_red'] = data[1] elif data[0] == 'Yellow': kwargs['score_yellow'] = data[1] elif data[0] == 'Purple': kwargs['score_purple'] = data[1] self.__perform_listeners(ServerEvent.GAME_END, (kwargs['score_blue'], kwargs['score_red'], kwargs['score_yellow'], kwargs['score_purple']), kwargs) elif event_type == ServerEvent.MAPCHANGE: kwargs = { 'mapname': args } self.__perform_listeners(ServerEvent.MAPCHANGE, (kwargs['mapname'],), kwargs) elif event_type == ServerEvent.NAMECHANGE: kwargs = { 'old_nick': args[0], 'new_nick': args[1] } self.__perform_listeners(ServerEvent.NAMECHANGE, (kwargs['old_nick'], kwargs['new_nick']), kwargs) elif event_type == ServerEvent.DISCONNECT: kwargs = { 'nick': args } self.__perform_listeners(ServerEvent.DISCONNECT, (kwargs['nick'],), kwargs) elif event_type == ServerEvent.FLAG_GRAB: kwargs = { 'nick': args[0], 'flag': args[1], } self.__perform_listeners(ServerEvent.FLAG_GRAB, (kwargs['nick'], kwargs['flag']), kwargs) elif event_type == ServerEvent.FLAG_DROP: kwargs = { 'nick': args } self.__perform_listeners(ServerEvent.FLAG_GRAB, (kwargs['nick'],), kwargs) elif event_type == ServerEvent.ROUND_END: kwargs = dict() self.__perform_listeners(ServerEvent.ROUND_END, args, kwargs) elif event_type == ServerEvent.GAMEMODE: kwargs = { 'gamemode': args } self.__perform_listeners(ServerEvent.GAMEMODE, args, kwargs) asyncio.ensure_future(self.get_event_handler(event_type)(**kwargs)) def get_event_handler(self, event_type): return getattr(self, self.handlers[event_type]) @asyncio.coroutine def __parse_line(self, line): """ Tries to match line with all event regexps. 
:param line: Line from logs """ for r in REGEXPS: results = r.findall(line) e = REGEXPS[r] for res in results: if e == ServerEvent.CHAT: # For security reasons if self.nicks_valid(res[0]): yield from self.__handle_event(event_type=e, args=res) return else: continue yield from self.__handle_event(event_type=e, args=res) def rcon(self, command, socket_timeout=3): """ Execute a console command using RCON. :param command: Command :param socket_timeout: Timeout for the UDP socket. :return: Response from server :rtype: str :example: .. code-block:: python :linenos: >>> from dplib.server import Server >>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello') >>> s.rcon('sv listuserip') 'ÿÿÿÿprint\\n mRokita [127.0.0.1:9419]\\nadmin is listing IP for mRokita [127.0.0.1:9419]\\n' """ sock = socket(AF_INET, SOCK_DGRAM) sock.connect((self.__hostname, self.__port)) sock.settimeout(socket_timeout) sock.send(bytes('\xFF\xFF\xFF\xFFrcon {} {}\n'.format(self.__rcon_password, command).encode('latin-1'))) ret = sock.recv(2048).decode('latin-1') return ret def status(self): """ Execute status query. :return: Status string :rtype: str """ sock = socket(AF_INET, SOCK_DGRAM) sock.connect((self.__hostname, self.__port)) sock.settimeout(3) sock.send(b'\xFF\xFF\xFF\xFFstatus\n') return sock.recv(2048).decode('latin-1') def new_map(self, map_name, gamemode=None): """ Changes the map using sv newmap <mapname> <gamemode> :param map_name: map name, without .bsp :param gamemode: Game mode :type gamemode: GameMode :return: Rcon response :raises MapNotFoundError: When map is not found on the server :rtype: str """ command = 'sv newmap {map}' if gamemode: command += ' {gamemode}' res = self.rcon(command.format(map=map_name, gamemode=gamemode)) if 'Cannot find mapfile' in res or 'usage' in res: raise MapNotFoundError return res def permaban(self, ip=None): """ Bans IP address or range of adresses and saves ban list to disk. 
:param ip: IP address to ban :return: Rcon response :rtype: str """ if ip: resp = self.rcon('addip %s' % ip) resp += '\n' + self.rcon('writeban') return resp else: raise TypeError('IP address is required.') def remove_permaban(self, ip=None): """ Removes ban on IP address and saves ban list to disk. :param ip: IP address to unban :return: Rcon response :rtype: str """ if ip: resp = self.rcon('removeip %s' % ip) resp += '\n' + self.rcon('writeban') return resp else: raise TypeError('IP address is required.') def tempoban(self, id=None, nick=None, duration=3): """ Temporarily bans a player with specified id using rcon :param id: Player's id :param nick: Player's nick :param duration: Ban duration in minutes (defaults to 3) :return: Rcon response :rtype: str """ if type(duration) != int: raise TypeError('Ban duration should be an integer, not a ' + str(type(duration))) if nick: id = self.get_ingame_info(nick).id if id: return self.rcon('tban %s %s' % (id, str(duration))) else: raise TypeError('Player id or nick is required.') def remove_tempobans(self): """ Removes all temporary bans :return: Rcon response :rtype: str """ return self.rcon("removetbans") def kick(self, id=None, nick=None): """ Kicks a player with id using rcon. :param id: Player's id :param nick: Player's nick :return: Rcon response :rtype: str """ if nick: id = self.get_ingame_info(nick).id if id: return self.rcon('kick %s' % id) else: raise TypeError('Player id or nick is required.') def say(self, message): """ Say a message :param message: Text, can contain {C} - color char {U} - underline char {I} italic. Remember to escape user input using :func:`dplib.parse.escape_braces`. :rtype: str :return: Rcon response :example: .. code-block:: python :linenos: >>> from dplib.server import Server >>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello') >>> s.say('{C}ARed text') >>> s.say('{U}Underline{U}') >>> s.say('{I}Italic{I}') :ingame result: .. 
image:: ..\..\doc\images\say_test.png """ return self.rcon('say "%s"' % render_text(message)) def cprint(self, message): """ Cprints a message. :param message: Text, can contain {C} - color char {U} - underline char {I} italic. Remember to escape user input using :func:`dplib.parse.escape_brac :return: Rcon response :rtype: str """ return self.rcon('sv cprint "%s"' % render_text(message)) def set_cvar(self, var, value): """ Set a server cvar :param var: cvar name :param value: value to set :return: Rcon response :rtype: str """ return self.rcon('set %s "%s"' % (var, value)) def get_cvar(self, var): """ Gets cvar value :param var: Variable name :type var: str :return: Cvar value :rtype: str """ res = self.rcon('"%s"' % var) if re.match('^....print\\\nUnknown command \\"%s"\\.\\\n' % re.escape(var), res): raise NameError('Cvar "%s" does not exist' % var) return re.findall('^....print\\\n\\"%s\\" is \\"(.*?)\\"\\\n' % re.escape(var), res)[0] @staticmethod def __get_predicate(margs, check): """ Returns a comparator. :param margs: Args to check :param check: Check function :return: Returns a function that compiles the check function and comparision strings """ def predicate(*args): if len(args) != len(margs): raise TypeError('predicate() takes %d positional arguments but %d were given' % (len(margs), len(args))) result = True for i, a in enumerate(margs): if a: result = result and a == args[i] if callable(check): result = result and check(*args) return result return predicate @asyncio.coroutine def wait_for_entrance(self, timeout=None, nick=None, build=None, addr=None, check=None): """ Waits for entrance. :param timeout: Time to wait for entrance event, if exceeded, returns None. :param nick: Player's nick. :param build: Player's build. 
:param addr: Player's address (IP:PORT) :return: """ future = asyncio.Future(loop=self.loop) margs = (nick, build, addr) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.ENTRANCE].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_respawn(self, timeout=None, team=None, nick=None, check=None): """ Waits for respawn event. :param timeout: Time to wait for respawn event, if exceeded, returns None. :param team: Player's team. :param nick: Player's nick. :param check: Check function, ignored if none. :return: Returns message info dict keys: ('team', 'nick'). :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (team, nick) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.RESPAWN].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_elim_teams_flag(self, timeout=None, team=None, nick=None, points=None, check=None): """ Waits for elim teams flag event. :param timeout: Time to wait for event, if exceeded, returns None. :param team: Player's team. :param nick: Player's nick. :param points: Points scored. :type points: int :param check: Check function, ignored if none. :return: Returns message info dict keys: ('team', 'nick', 'points'). :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (team, nick, points) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.ELIM_TEAMS_FLAG].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_team_switched(self, timeout=None, nick=None, old_team=None, new_team=None, check=None): """ Waits for team switch event. 
:param timeout: Time to wait for event, if exceeded, returns None. :param old_team: Player's old team. :param new_team: Player's new team. :param nick: Player's nick. :param check: Check function, ignored if none. :return: Returns message info dict keys: ('nick', 'old_team', 'new_nick'). :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (nick, old_team, new_team) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.TEAM_SWITCHED].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_round_started(self, timeout=None, check=None): """ Waits for round start. :param timeout: Time to wait for event, if exceeded, returns None. :param check: Check function, ignored if none. :return: Returns an empty dict. :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = tuple() predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.ROUND_STARTED].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_flag_captured(self, timeout=None, team=None, nick=None, flag=None, check=None): """ Waits for flag capture. :param timeout: Time to wait for event, if exceeded, returns None. :param team: Player's team. :param nick: Player's nick. :param flag: Captured flag. :param check: Check function, ignored if none. :return: Returns an empty dict. 
:rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (team, nick, flag) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.FLAG_CAPTURED].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_game_end(self, timeout=None, score_blue=None, score_red=None, score_yellow=None, score_purple=None, check=None): """ Waits for game end. :param timeout: Time to wait for event, if exceeded, returns None. :param score_blue: Blue score :param score_red: Red score. :param score_yellow: Yellow score. :param score_purple: Purple score. :param check: Check function, ignored if none. :return: Returns an empty dict. :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (score_blue, score_red, score_yellow, score_purple) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.GAME_END].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data @asyncio.coroutine def wait_for_elim(self, timeout=None, killer_nick=None, killer_weapon=None, victim_nick=None, victim_weapon=None, check=None): """ Waits for elimination event. :param timeout: Time to wait for elimination event, if exceeded, returns None. :param killer_nick: Killer's nick to match, ignored if None. :param killer_weapon: Killer's weapon to match, ignored if None. :param victim_nick: Victim's nick to match, ignored if None. :param victim_weapon: Victim's weapon to match, ignored if None. :param check: Check function, ignored if None. 
:return: Returns message info dict keys: ('killer_nick', 'killer_weapon', 'victim_nick', 'victim_weapon') :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (killer_nick, killer_weapon, victim_nick, victim_weapon) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.ELIM].append((predicate, future)) try: elim_info = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: elim_info = None return elim_info @asyncio.coroutine def wait_for_mapchange(self, timeout=None, mapname=None, check=None): """ Waits for mapchange. :param timeout: Time to wait for elimination event, if exceeded, returns None. :param mapname: Killer's nick to match, ignored if None. :param check: Check function, ignored if None. :return: Returns message info dict keys: ('killer_nick', 'killer_weapon', 'victim_nick', 'victim_weapon') :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (mapname,) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.MAPCHANGE].append((predicate, future)) try: mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: mapchange_info = None return mapchange_info @asyncio.coroutine def wait_for_namechange(self, timeout=None, old_nick=None, new_nick=None, check=None): """ Waits for mapchange. :param timeout: Time to wait for elimination event, if exceeded, returns None. :param mapname: Killer's nick to match, ignored if None. :param check: Check function, ignored if None. 
:return: Returns message info dict keys: ('killer_nick', 'killer_weapon', 'victim_nick', 'victim_weapon') :rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (old_nick, new_nick) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.NAMECHANGE].append((predicate, future)) try: mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: mapchange_info = None return mapchange_info @asyncio.coroutine def wait_for_message(self, timeout=None, nick=None, message=None, check=None): """ Waits for a message. :param timeout: Time to wait for message, if exceeded, returns None. :param nick: Player's nick to match, ignored if None :type nick: str :param message: Message text to match, ignored if None :type message: str :param check: Check function, ignored if None :return: Returns message info dict keys: ('nick', 'message') :rtype: dict :example: .. code-block:: python :linenos: @s.event def on_chat(nick, message): if message == '!start' and not elim_active: msg = yield from s.wait_for_message(check=lambda n, m: m.startswith('!hi ')) s.say('Hi ' + msg['message'].split('!hi ')[1] + '!') """ future = asyncio.Future(loop=self.loop) margs = (nick, message) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.CHAT].append((predicate, future)) try: message = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: message = None return message @asyncio.coroutine def wait_for_flag_drop(self, timeout=None, nick=None, check=None): """ Waits for flag drop. :param timeout: Time to wait for event, if exceeded, returns None. :param nick: Player's nick. :param flag: dropped flag. :param check: Check function, ignored if none. :return: Returns an empty dict. 
:rtype: dict """ future = asyncio.Future(loop=self.loop) margs = (nick) predicate = self.__get_predicate(margs, check) self.__listeners[ServerEvent.FLAG_DROP].append((predicate, future)) try: data = yield from asyncio.wait_for(future, timeout, loop=self.loop) except asyncio.TimeoutError: data = None return data def start(self, scan_old=False, realtime=True, debug=False): """ Main loop. :param scan_old: Scan present logfile data :type scan_old: bool :param realtime: Wait for incoming logfile data :type realtime: bool """ if not (self.__logfile_name or self.__pty_master): raise AttributeError("Logfile name or a Popen process is required.") self.__alive = True if self.__logfile_name: self.__log_file = open(self.__logfile_name, 'rb') if self.__log_file and scan_old: self.__log_file.readlines() buf = '' if realtime: while self.__alive: try: buf += self._read_log() lines = buf.splitlines(True) line = '' for line in lines: if debug: print("[DPLib] %s" % line.strip()) yield from self.__parse_line(line) if not line or line[-1] != '\n': buf = line else: buf = '' yield from asyncio.sleep(0.05) except OSError as e: raise e if self.__log_file: self.__log_file.close() if self.__pty_master: os.close(self.__pty_master) def _read_log(self): if self.__log_file: return self.__log_file.readline().decode('latin-1') elif self.__pty_master: r, w, x = select.select([self.__pty_master], [], [], 0.01) if r: return os.read(self.__pty_master, 1024).decode('latin-1') else: return '' def get_players(self): """ Gets playerlist. 
:return: List of :class:`.Player` instances :rtype: list """ response = self.rcon('sv players') response = re.findall('(\d+) \\(?(.*?)\\)?\\] \\* (?:OP \d+, )?(.+) \\((b\d+)\\)', response) players = list() for p_data in response: player = Player(nick=p_data[2], id=p_data[0], dplogin=p_data[1], build=p_data[3], server=self) players.append(player) return players def get_simple_playerlist(self): """ Get a list of player names :return: List of nicks :rtype: list """ status = self.get_status() players = status['players'] playerlist = [] for p in players: playerlist.append(p['name']) return playerlist def get_status(self): """ Gets server status :example: .. code-block:: python :linenos: >>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'C:\Games\Paintball2\pball\qconsole27910.log', rcon_password='hello') >>> s.get_status() {'players': [{'score': '0', 'ping': '13', 'name': 'mRokita'}], 'sv_certificated': '1', 'mapname': 'beta/wobluda_fix', 'TimeLeft': '20:00', '_scores': 'Red:0 Blue:0 ', 'gamename': 'Digital Paint Paintball 2 v1.930(186)', 'gameversion': 'DPPB2 v1.930(186)', 'sv_login': '1', 'needpass': '0', 'gamedate': 'Aug 10 2015', 'protocol': '34', 'version': '2.00 x86 Aug 10 2015 Win32 RELEASE (41)', 'hostname': 'asdfgh', 'elim': 'airtime', 'fraglimit': '50', 'timelimit': '20', 'gamedir': 'pball', 'game': 'pball', 'maxclients': '8'} :return: status dict :rtype: dict """ dictionary = {} players = [] response = self.status().split('\n')[1:] variables = response[0] players_str = (response[1:]) for i in players_str: if not i: continue temp_dict = {} cleaned_name = decode_ingame_text(i) separated = cleaned_name.split(' ') temp_dict['score'] = separated[0] temp_dict['ping'] = separated[1] temp_dict['name'] = cleaned_name.split("%s %s " % (separated[0], separated[1]))[1][1:-1] players.append(temp_dict) dictionary['players'] = players variables = variables.split('\\')[1:] for i in range(0, len(variables), 2): dictionary[variables[i]] = variables[i + 1] return 
dictionary def get_ingame_info(self, nick): """ Get ingame info about a player with nickname :param nick: Nick :return: An instance of :class:`.Player` """ players = self.get_players() for p in players: if p.nick == nick: return p return None def make_secure(self, timeout=10): """ This function fixes some compatibility and security issues on DP server side - Adds "mapchange" to sv_blockednames - Sets sl_logging to 1 All variables are set using the rcon protocol, use this function if you want to wait for the server to start. :param timeout: Timeout in seconds """ sl_logging_set = False sv_blockednames_set = False self.__is_secure = False start_time = time() while not (sl_logging_set and sv_blockednames_set) and time() - start_time < timeout: try: if not sl_logging_set: sl_logging = self.get_cvar('sl_logging') if sl_logging != '1': self.set_cvar('sl_logging', '1') else: sl_logging_set = True if not sv_blockednames_set: blockednames = self.get_cvar('sv_blockednames') if not 'maploaded' in blockednames: self.set_cvar('sv_blockednames', ','.join([blockednames, 'maploaded'])) else: sv_blockednames_set = True except ConnectionError or timeout: pass if not (sl_logging_set and sv_blockednames_set): raise SecurityCheckError( "Configuring the DP server failed," " check if the server is running " "and the rcon_password is correct.") else: self.__is_secure = True def run(self, scan_old=False, realtime=True, debug=False, make_secure=True): """ Runs the main loop using asyncio. :param scan_old: Scan present logfile data :type scan_old: bool :param realtime: Wait for incoming logfile data :type realtime: bool """ if make_secure and not self.__rcon_password: raise AttributeError( "Setting the rcon_password is required to secure DPLib." 
" You have to either set a rcon_password or add set" " \"sl_logging 1; set sv_blockednames mapname\" " "to your DP server config and use Server.run with" " make_secure=False") if make_secure: self.make_secure() self.loop.run_until_complete(self.start(scan_old, realtime, debug))
agpl-3.0
-7,791,786,125,953,492,000
33.949413
535
0.543832
false
3.845986
false
false
false