| repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (990 classes) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
nuncjo/odoo | addons/auth_signup/__init__.py | 446 | 1039 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
import res_config
import res_users
| agpl-3.0 | 2,409,536,188,513,523,000 | 42.291667 | 78 | 0.615977 | false |
ctb/cvxpy | doc/sphinxext/docscrape.py | 68 | 15425 |
"""Extract reference documentation from the NumPy source tree.
"""
import inspect
import textwrap
import re
import pydoc
from StringIO import StringIO
from warnings import warn
class Reader(object):
"""A line-based string reader.
"""
def __init__(self, data):
"""
Parameters
----------
data : str
String with lines separated by '\n'.
"""
if isinstance(data,list):
self._str = data
else:
self._str = data.split('\n') # store string as list of lines
self.reset()
def __getitem__(self, n):
return self._str[n]
def reset(self):
self._l = 0 # current line nr
def read(self):
if not self.eof():
out = self[self._l]
self._l += 1
return out
else:
return ''
def seek_next_non_empty_line(self):
for l in self[self._l:]:
if l.strip():
break
else:
self._l += 1
def eof(self):
return self._l >= len(self._str)
def read_to_condition(self, condition_func):
start = self._l
for line in self[start:]:
if condition_func(line):
return self[start:self._l]
self._l += 1
if self.eof():
return self[start:self._l+1]
return []
def read_to_next_empty_line(self):
self.seek_next_non_empty_line()
def is_empty(line):
return not line.strip()
return self.read_to_condition(is_empty)
def read_to_next_unindented_line(self):
def is_unindented(line):
return (line.strip() and (len(line.lstrip()) == len(line)))
return self.read_to_condition(is_unindented)
def peek(self,n=0):
if self._l + n < len(self._str):
return self[self._l + n]
else:
return ''
def is_empty(self):
return not ''.join(self._str).strip()
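# Illustrative sketch (not part of the original module): how the Reader
# class above consumes a docstring line by line. The sample text is
# hypothetical.
#
# >>> r = Reader("summary\n\nbody line 1\nbody line 2\n")
# >>> r.read()
# 'summary'
# >>> r.read_to_next_empty_line()   # skips the blank line, then reads on
# ['body line 1', 'body line 2']
# >>> r.eof()
# False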
class NumpyDocString(object):
def __init__(self, docstring, config={}):
docstring = textwrap.dedent(docstring).split('\n')
self._doc = Reader(docstring)
self._parsed_data = {
'Signature': '',
'Summary': [''],
'Extended Summary': [],
'Parameters': [],
'Returns': [],
'Raises': [],
'Warns': [],
'Other Parameters': [],
'Attributes': [],
'Methods': [],
'See Also': [],
'Notes': [],
'Warnings': [],
'References': '',
'Examples': '',
'index': {}
}
self._parse()
def __getitem__(self,key):
return self._parsed_data[key]
def __setitem__(self,key,val):
if key not in self._parsed_data:
warn("Unknown section %s" % key)
else:
self._parsed_data[key] = val
def _is_at_section(self):
self._doc.seek_next_non_empty_line()
if self._doc.eof():
return False
l1 = self._doc.peek().strip() # e.g. Parameters
if l1.startswith('.. index::'):
return True
l2 = self._doc.peek(1).strip() # ---------- or ==========
return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
def _strip(self,doc):
i = 0
j = 0
for i,line in enumerate(doc):
if line.strip(): break
for j,line in enumerate(doc[::-1]):
if line.strip(): break
return doc[i:len(doc)-j]
def _read_to_next_section(self):
section = self._doc.read_to_next_empty_line()
while not self._is_at_section() and not self._doc.eof():
if not self._doc.peek(-1).strip(): # previous line was empty
section += ['']
section += self._doc.read_to_next_empty_line()
return section
def _read_sections(self):
while not self._doc.eof():
data = self._read_to_next_section()
name = data[0].strip()
if name.startswith('..'): # index section
yield name, data[1:]
elif len(data) < 2:
yield StopIteration
else:
yield name, self._strip(data[2:])
def _parse_param_list(self,content):
r = Reader(content)
params = []
while not r.eof():
header = r.read().strip()
if ' : ' in header:
arg_name, arg_type = header.split(' : ')[:2]
else:
arg_name, arg_type = header, ''
desc = r.read_to_next_unindented_line()
desc = dedent_lines(desc)
params.append((arg_name,arg_type,desc))
return params
_name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
def _parse_see_also(self, content):
"""
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, :meth:`func_name`, func_name3
"""
items = []
def parse_item_name(text):
"""Match ':role:`name`' or 'name'"""
m = self._name_rgx.match(text)
if m:
g = m.groups()
if g[1] is None:
return g[3], None
else:
return g[2], g[1]
raise ValueError("%s is not a item name" % text)
def push_item(name, rest):
if not name:
return
name, role = parse_item_name(name)
items.append((name, list(rest), role))
del rest[:]
current_func = None
rest = []
for line in content:
if not line.strip(): continue
m = self._name_rgx.match(line)
if m and line[m.end():].strip().startswith(':'):
push_item(current_func, rest)
current_func, line = line[:m.end()], line[m.end():]
rest = [line.split(':', 1)[1].strip()]
if not rest[0]:
rest = []
elif not line.startswith(' '):
push_item(current_func, rest)
current_func = None
if ',' in line:
for func in line.split(','):
if func.strip():
push_item(func, [])
elif line.strip():
current_func = line
elif current_func is not None:
rest.append(line.strip())
push_item(current_func, rest)
return items
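# Worked example (illustrative, not part of the original module): for the
# content lines "numpy.dot : Dot product." and "numpy.inner, numpy.outer",
# _parse_see_also returns
# [('numpy.dot', ['Dot product.'], None),
#  ('numpy.inner', [], None),
#  ('numpy.outer', [], None)].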
def _parse_index(self, section, content):
"""
.. index: default
:refguide: something, else, and more
"""
def strip_each_in(lst):
return [s.strip() for s in lst]
out = {}
section = section.split('::')
if len(section) > 1:
out['default'] = strip_each_in(section[1].split(','))[0]
for line in content:
line = line.split(':')
if len(line) > 2:
out[line[1]] = strip_each_in(line[2].split(','))
return out
def _parse_summary(self):
"""Grab signature (if given) and summary"""
if self._is_at_section():
return
summary = self._doc.read_to_next_empty_line()
summary_str = " ".join([s.strip() for s in summary]).strip()
if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
self['Signature'] = summary_str
if not self._is_at_section():
self['Summary'] = self._doc.read_to_next_empty_line()
else:
self['Summary'] = summary
if not self._is_at_section():
self['Extended Summary'] = self._read_to_next_section()
def _parse(self):
self._doc.reset()
self._parse_summary()
for (section,content) in self._read_sections():
if not section.startswith('..'):
section = ' '.join([s.capitalize() for s in section.split(' ')])
if section in ('Parameters', 'Returns', 'Raises', 'Warns',
'Other Parameters', 'Attributes', 'Methods'):
self[section] = self._parse_param_list(content)
elif section.startswith('.. index::'):
self['index'] = self._parse_index(section, content)
elif section == 'See Also':
self['See Also'] = self._parse_see_also(content)
else:
self[section] = content
# string conversion routines
def _str_header(self, name, symbol='-'):
return [name, len(name)*symbol]
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
if self['Signature']:
return [self['Signature'].replace('*','\*')] + ['']
else:
return ['']
def _str_summary(self):
if self['Summary']:
return self['Summary'] + ['']
else:
return []
def _str_extended_summary(self):
if self['Extended Summary']:
return self['Extended Summary'] + ['']
else:
return []
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_header(name)
for param,param_type,desc in self[name]:
out += ['%s : %s' % (param, param_type)]
out += self._str_indent(desc)
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += self[name]
out += ['']
return out
def _str_see_also(self, func_role):
if not self['See Also']: return []
out = []
out += self._str_header("See Also")
last_had_desc = True
for func, desc, role in self['See Also']:
if role:
link = ':%s:`%s`' % (role, func)
elif func_role:
link = ':%s:`%s`' % (func_role, func)
else:
link = "`%s`_" % func
if desc or last_had_desc:
out += ['']
out += [link]
else:
out[-1] += ", %s" % link
if desc:
out += self._str_indent([' '.join(desc)])
last_had_desc = True
else:
last_had_desc = False
out += ['']
return out
def _str_index(self):
idx = self['index']
out = []
out += ['.. index:: %s' % idx.get('default','')]
for section, references in idx.iteritems():
if section == 'default':
continue
out += [' :%s: %s' % (section, ', '.join(references))]
return out
def __str__(self, func_role=''):
out = []
out += self._str_signature()
out += self._str_summary()
out += self._str_extended_summary()
for param_list in ('Parameters', 'Returns', 'Other Parameters',
'Raises', 'Warns'):
out += self._str_param_list(param_list)
out += self._str_section('Warnings')
out += self._str_see_also(func_role)
for s in ('Notes','References','Examples'):
out += self._str_section(s)
for param_list in ('Attributes', 'Methods'):
out += self._str_param_list(param_list)
out += self._str_index()
return '\n'.join(out)
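# Illustrative sketch (not part of the original module): parsing a minimal
# NumPy-style docstring with the class above. The sample function is
# hypothetical.
#
# >>> doc = NumpyDocString("add(a, b)\n"
# ...                      "\n"
# ...                      "Add two numbers.\n"
# ...                      "\n"
# ...                      "Parameters\n"
# ...                      "----------\n"
# ...                      "a : int\n"
# ...                      "    First operand.\n")
# >>> doc['Signature']
# 'add(a, b)'
# >>> doc['Parameters']
# [('a', 'int', ['First operand.'])]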
def indent(str,indent=4):
indent_str = ' '*indent
if str is None:
return indent_str
lines = str.split('\n')
return '\n'.join(indent_str + l for l in lines)
def dedent_lines(lines):
"""Deindent a list of lines maximally"""
return textwrap.dedent("\n".join(lines)).split("\n")
def header(text, style='-'):
return text + '\n' + style*len(text) + '\n'
class FunctionDoc(NumpyDocString):
def __init__(self, func, role='func', doc=None, config={}):
self._f = func
self._role = role # e.g. "func" or "meth"
if doc is None:
if func is None:
raise ValueError("No function or docstring given")
doc = inspect.getdoc(func) or ''
NumpyDocString.__init__(self, doc)
if not self['Signature'] and func is not None:
func, func_name = self.get_func()
try:
# try to read signature
argspec = inspect.getargspec(func)
argspec = inspect.formatargspec(*argspec)
argspec = argspec.replace('*','\*')
signature = '%s%s' % (func_name, argspec)
except TypeError, e:
signature = '%s()' % func_name
self['Signature'] = signature
def get_func(self):
func_name = getattr(self._f, '__name__', self.__class__.__name__)
if inspect.isclass(self._f):
func = getattr(self._f, '__call__', self._f.__init__)
else:
func = self._f
return func, func_name
def __str__(self):
out = ''
func, func_name = self.get_func()
signature = self['Signature'].replace('*', '\*')
roles = {'func': 'function',
'meth': 'method'}
if self._role:
if self._role not in roles:
print "Warning: invalid role %s" % self._role
out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''),
func_name)
out += super(FunctionDoc, self).__str__(func_role=self._role)
return out
class ClassDoc(NumpyDocString):
extra_public_methods = ['__call__']
def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
config={}):
if not inspect.isclass(cls) and cls is not None:
raise ValueError("Expected a class or None, but got %r" % cls)
self._cls = cls
if modulename and not modulename.endswith('.'):
modulename += '.'
self._mod = modulename
if doc is None:
if cls is None:
raise ValueError("No class or documentation string given")
doc = pydoc.getdoc(cls)
NumpyDocString.__init__(self, doc)
if config.get('show_class_members', True):
if not self['Methods']:
self['Methods'] = [(name, '', '')
for name in sorted(self.methods)]
if not self['Attributes']:
self['Attributes'] = [(name, '', '')
for name in sorted(self.properties)]
@property
def methods(self):
if self._cls is None:
return []
return [name for name,func in inspect.getmembers(self._cls)
if ((not name.startswith('_')
or name in self.extra_public_methods)
and callable(func))]
@property
def properties(self):
if self._cls is None:
return []
return [name for name,func in inspect.getmembers(self._cls)
if not name.startswith('_') and func is None]
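# Illustrative sketch (not part of the original module): FunctionDoc pulls
# the signature via inspect when the docstring does not provide one, and
# renders an rst block. The sample function is hypothetical.
#
# >>> def clip(x, lo=0, hi=1):
# ...     """Clamp x to the closed interval [lo, hi]."""
# ...     return max(lo, min(hi, x))
# >>> fd = FunctionDoc(clip)
# >>> fd['Signature']
# 'clip(x, lo=0, hi=1)'
# >>> print fd      # emits '.. function:: clip' plus the parsed sections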
| gpl-3.0 | -2,520,109,012,333,131,300 | 29.544554 | 80 | 0.475656 | false |
kidaa30/spacewalk | backend/server/rhnSQL/sql_row.py | 4 | 4930 |
#
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# a class used to handle a row of data in a particular table
#
import string
from rhn.UserDictCase import UserDictCase
from spacewalk.common.rhnException import rhnException
import sql_base
import sql_lib
class Row(UserDictCase):
""" This class allows one to work with the columns of a particular row in a more
convenient manner (i.e., using a dictionary interface). It allows for the row
data to be loaded and saved and is generally easier to use than the Table
class which is really designed for bulk updates and stuff like that.
The easiest way to separate what these things are for is to remember that
the Table class indexes by KEY, while the Row class indexes by column
"""
def __init__(self, db, table, hashname, hashval=None):
UserDictCase.__init__(self)
if not isinstance(db, sql_base.Database):
raise rhnException("Argument db is not a database instance", db)
self.db = db
self.table = table
self.hashname = string.lower(hashname)
# and the data dictionary
self.data = {}
# is this a real entry (ie, use insert or update)
self.real = 0
if hashval is not None: # if we have to load an entry already...
self.load(hashval)
def __repr__(self):
return "<%s instance at 0x%0x on (%s, %s, %s)>" % (
self.__class__.__name__, abs(id(self)),
self.table, self.hashname, self.get(self.hashname))
__str__ = __repr__
def __setitem__(self, name, value):
""" make it work like a dictionary """
x = string.lower(name)
# forbid setting the value of the hash column because of the
# ambiguity of the operation (is it a "save as new id" or
# "load from new id"?). We provide interfaces for load, save
# and create instead.
if x == self.hashname:
raise AttributeError("Can not reset the value of the hash key")
if x not in self.data or self.data[x][0] != value:
self.data[x] = (value, 1)
def __getitem__(self, name):
x = string.lower(name)
if x in self.data:
return self.data[x][0]
raise KeyError("Key %s not found in the Row dictionary" % name)
def get(self, name):
x = string.lower(name)
if x in self.data:
return self.data[x][0]
return None
def reset(self, val=0):
""" reset the changed status for these entries """
for k, v in self.data.items():
# tuples do not support item assignment
self.data[k] = (v[0], val)
def create(self, hashval):
""" create it as a new entry """
self.data[self.hashname] = (hashval, 0)
self.real = 0
self.save()
def load(self, hashval):
""" load an entry """
return self.load_sql("%s = :hashval" % self.hashname, {'hashval': hashval})
def load_sql(self, sql, pdict={}):
""" load from a sql clause """
h = self.db.prepare("select * from %s where %s" % (self.table, sql))
h.execute(**pdict)
ret = h.fetchone_dict()
self.data = {}
if not ret:
self.real = 0
return 0
for k, v in ret.items():
self.data[k] = (v, 0)
self.real = 1
return 1
def save(self, with_updates=1):
""" now save an entry """
if self.hashname not in self.data:
raise AttributeError("Table does not have a hash `%s' key" % self.hashname)
# get a list of fields to be set
items = map(lambda a: (a[0], a[1][0]),
filter(lambda b: b[1][1] == 1, self.data.items()))
if not items: # if there is nothing for us to do, avoid doing it.
return
# and now build the SQL statements
if self.real: # Update
if not with_updates:
raise sql_base.ModifiedRowError()
sql, pdict = sql_lib.build_sql_update(self.table, self.hashname, items)
else:
sql, pdict = sql_lib.build_sql_insert(self.table, self.hashname, items)
h = self.db.prepare(sql)
pdict["p0"] = self.data[self.hashname][0]
# and now do it
h.execute(**pdict)
self.real = 1
return
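# Illustrative sketch (not part of the original module): typical use of Row
# against a hypothetical table "rhnDummy" keyed by "id"; `db` stands for an
# already-initialised sql_base.Database instance.
#
# row = Row(db, "rhnDummy", "id")
# row["label"] = "example"     # marks the column as changed
# row.create(42)               # INSERTs the changed columns with id = 42
# row["label"] = "renamed"
# row.save()                   # UPDATEs only the changed columns
# row.load(42)                 # reloads from the DB; returns 1 if found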
| gpl-2.0 | -162,553,287,438,461,340 | 35.791045 | 87 | 0.596957 | false |
alexcuellar/odoo | addons/account_budget/account_budget.py | 194 | 9368 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import date, datetime
from openerp.osv import fields, osv
from openerp.tools import ustr, DEFAULT_SERVER_DATE_FORMAT
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
# ---------------------------------------------------------
# Utils
# ---------------------------------------------------------
def strToDate(dt):
return date(int(dt[0:4]), int(dt[5:7]), int(dt[8:10]))
def strToDatetime(strdate):
return datetime.strptime(strdate, DEFAULT_SERVER_DATE_FORMAT)
# ---------------------------------------------------------
# Budgets
# ---------------------------------------------------------
class account_budget_post(osv.osv):
_name = "account.budget.post"
_description = "Budgetary Position"
_columns = {
'code': fields.char('Code', size=64, required=True),
'name': fields.char('Name', required=True),
'account_ids': fields.many2many('account.account', 'account_budget_rel', 'budget_id', 'account_id', 'Accounts'),
'crossovered_budget_line': fields.one2many('crossovered.budget.lines', 'general_budget_id', 'Budget Lines'),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.budget.post', context=c)
}
_order = "name"
class crossovered_budget(osv.osv):
_name = "crossovered.budget"
_description = "Budget"
_columns = {
'name': fields.char('Name', required=True, states={'done':[('readonly',True)]}),
'code': fields.char('Code', size=16, required=True, states={'done':[('readonly',True)]}),
'creating_user_id': fields.many2one('res.users', 'Responsible User'),
'validating_user_id': fields.many2one('res.users', 'Validate User', readonly=True),
'date_from': fields.date('Start Date', required=True, states={'done':[('readonly',True)]}),
'date_to': fields.date('End Date', required=True, states={'done':[('readonly',True)]}),
'state' : fields.selection([('draft','Draft'),('cancel', 'Cancelled'),('confirm','Confirmed'),('validate','Validated'),('done','Done')], 'Status', select=True, required=True, readonly=True, copy=False),
'crossovered_budget_line': fields.one2many('crossovered.budget.lines', 'crossovered_budget_id', 'Budget Lines', states={'done':[('readonly',True)]}, copy=True),
'company_id': fields.many2one('res.company', 'Company', required=True),
}
_defaults = {
'state': 'draft',
'creating_user_id': lambda self, cr, uid, context: uid,
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.budget.post', context=c)
}
def budget_confirm(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {
'state': 'confirm'
})
return True
def budget_draft(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {
'state': 'draft'
})
return True
def budget_validate(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {
'state': 'validate',
'validating_user_id': uid,
})
return True
def budget_cancel(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {
'state': 'cancel'
})
return True
def budget_done(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {
'state': 'done'
})
return True
class crossovered_budget_lines(osv.osv):
def _prac_amt(self, cr, uid, ids, context=None):
res = {}
result = 0.0
if context is None:
context = {}
account_obj = self.pool.get('account.account')
for line in self.browse(cr, uid, ids, context=context):
acc_ids = [x.id for x in line.general_budget_id.account_ids]
if not acc_ids:
raise osv.except_osv(_('Error!'),_("The Budget '%s' has no accounts!") % ustr(line.general_budget_id.name))
acc_ids = account_obj._get_children_and_consol(cr, uid, acc_ids, context=context)
date_to = line.date_to
date_from = line.date_from
if line.analytic_account_id.id:
cr.execute("SELECT SUM(amount) FROM account_analytic_line WHERE account_id=%s AND (date "
"between to_date(%s,'yyyy-mm-dd') AND to_date(%s,'yyyy-mm-dd')) AND "
"general_account_id=ANY(%s)", (line.analytic_account_id.id, date_from, date_to,acc_ids,))
result = cr.fetchone()[0]
if result is None:
result = 0.00
res[line.id] = result
return res
def _prac(self, cr, uid, ids, name, args, context=None):
res={}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = self._prac_amt(cr, uid, [line.id], context=context)[line.id]
return res
def _theo_amt(self, cr, uid, ids, context=None):
if context is None:
context = {}
res = {}
for line in self.browse(cr, uid, ids, context=context):
today = datetime.now()
if line.paid_date:
if strToDate(line.date_to) <= strToDate(line.paid_date):
theo_amt = 0.00
else:
theo_amt = line.planned_amount
else:
line_timedelta = strToDatetime(line.date_to) - strToDatetime(line.date_from)
elapsed_timedelta = today - (strToDatetime(line.date_from))
if elapsed_timedelta.days < 0:
# If the budget line has not started yet, theoretical amount should be zero
theo_amt = 0.00
elif line_timedelta.days > 0 and today < strToDatetime(line.date_to):
# If today is between the budget line date_from and date_to
theo_amt = (elapsed_timedelta.total_seconds() / line_timedelta.total_seconds()) * line.planned_amount
else:
theo_amt = line.planned_amount
res[line.id] = theo_amt
return res
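# Worked example (illustrative, not part of the original module): a line
# planned at 1000.0 running from 2015-01-01 to 2015-01-11 (10 days), queried
# 4 days in, gives elapsed/total = 0.4, so the theoretical amount is 400.0;
# before date_from it is 0.0 and once date_to has passed it is the full
# planned_amount.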
def _theo(self, cr, uid, ids, name, args, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = self._theo_amt(cr, uid, [line.id], context=context)[line.id]
return res
def _perc(self, cr, uid, ids, name, args, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
if line.theoritical_amount != 0.00:
res[line.id] = float((line.practical_amount or 0.0) / line.theoritical_amount) * 100
else:
res[line.id] = 0.00
return res
_name = "crossovered.budget.lines"
_description = "Budget Line"
_columns = {
'crossovered_budget_id': fields.many2one('crossovered.budget', 'Budget', ondelete='cascade', select=True, required=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account'),
'general_budget_id': fields.many2one('account.budget.post', 'Budgetary Position',required=True),
'date_from': fields.date('Start Date', required=True),
'date_to': fields.date('End Date', required=True),
'paid_date': fields.date('Paid Date'),
'planned_amount':fields.float('Planned Amount', required=True, digits_compute=dp.get_precision('Account')),
'practical_amount':fields.function(_prac, string='Practical Amount', type='float', digits_compute=dp.get_precision('Account')),
'theoritical_amount':fields.function(_theo, string='Theoretical Amount', type='float', digits_compute=dp.get_precision('Account')),
'percentage':fields.function(_perc, string='Percentage', type='float'),
'company_id': fields.related('crossovered_budget_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True)
}
class account_analytic_account(osv.osv):
_inherit = "account.analytic.account"
_columns = {
'crossovered_budget_line': fields.one2many('crossovered.budget.lines', 'analytic_account_id', 'Budget Lines'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,316,561,570,903,656,000 | 42.775701 | 210 | 0.579846 | false |
yogo1212/RIOT | tests/bench_runtime_coreapis/tests/01-run.py | 14 | 1439 |
#!/usr/bin/env python3
# Copyright (C) 2018 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# The default timeout is not enough for this test on some of the slower boards
TIMEOUT = 30
BENCHMARK_REGEXP = r"\s+{func}:\s+\d+us\s+---\s+\d*\.*\d+us per call\s+---\s+\d+ calls per sec"
def testfunc(child):
child.expect_exact('Runtime of Selected Core API functions')
child.expect(BENCHMARK_REGEXP.format(func="nop loop"))
child.expect(BENCHMARK_REGEXP.format(func=r"mutex_init\(\)"))
child.expect(BENCHMARK_REGEXP.format(func="mutex lock/unlock"), timeout=TIMEOUT)
child.expect(BENCHMARK_REGEXP.format(func=r"thread_flags_set\(\)"))
child.expect(BENCHMARK_REGEXP.format(func=r"thread_flags_clear\(\)"))
child.expect(BENCHMARK_REGEXP.format(func="thread flags set/wait any"), timeout=TIMEOUT)
child.expect(BENCHMARK_REGEXP.format(func="thread flags set/wait all"), timeout=TIMEOUT)
child.expect(BENCHMARK_REGEXP.format(func="thread flags set/wait one"), timeout=TIMEOUT)
child.expect(BENCHMARK_REGEXP.format(func=r"msg_try_receive\(\)"), timeout=TIMEOUT)
child.expect(BENCHMARK_REGEXP.format(func=r"msg_avail\(\)"))
child.expect_exact('[SUCCESS]')
if __name__ == "__main__":
sys.exit(run(testfunc))
| lgpl-2.1 | -8,241,191,474,528,647,000 | 41.294118 | 95 | 0.718359 | false |
topic2k/EventGhost | plugins/FS20PCS/__init__.py | 4 | 21740 |
"""<rst>
Allows sending commands to FS20 receivers.
|
|fS20Image|_
`Direct shop link <http://www.elv.de/output/controller.aspx?cid=74&detail=10&detail2=27743>`__
.. |fS20Image| image:: picture.jpg
.. _fS20Image: http://www.elv.de/
"""
import time
eg.RegisterPlugin(
name = "ELV FS20 PCS",
author = "Bartman",
version = "0.2.1486",
kind = "external",
canMultiLoad = False,
createMacrosOnAdd = False,
description = __doc__,
url = "http://www.eventghost.net/forum/viewtopic.php?f=9&t=2147",
guid = '{D76A6D18-142A-4f75-8F93-9CDA86DBC310}'
)
import binascii
import math
import sys
import win32event
import wx.lib.mixins.listctrl as listmix
from wx.lib.masked import TextCtrl
import wx.lib.masked as masked
from eg.WinApi.HID import HIDThread
from eg.WinApi.HID import GetDevicePath
from eg.WinApi.HID import IsDeviceName
VENDOR_ID = 6383
PRODUCT_ID = 57365
TIME_OUT = 250
class Text:
errorFind = "Error finding ELV FS20 PCS"
timedActionName = "Timed actions"
timedActionDescription = "Allows controlling FS20 devices with a timed parameter."
address = "Address:"
timerValue = "Timer value:"
repeat = "Repeat:"
level = "Level:"
repeatSuffix = "{0} ({1} times)"
class FS20PCS(eg.PluginClass):
text = Text
def AddNewAction(self, root, internalName, baseClass, classFuncCode, externalName, classDescription, classLabelFormat):
class MyText:
labelFormat = classLabelFormat
class tmpAction(baseClass):
text = MyText
name = externalName
description = classDescription
funcCode = classFuncCode
tmpAction.__name__ = internalName
root.AddAction(tmpAction)
def __init__(self):
self.version = None
self.thread = None
self.AddNewAction(self, "Off", SimpleAction, 0x00, "Off", "Turns device off (dim to 0%)", "Turn off {0}")
self.AddNewAction(self, "On", SimpleAction, 0x10, "On", "Turns device on (dim to 100%)", "Turn on {0}")
self.AddNewAction(self, "PreviousValue", SimpleAction, 0x11, "On with previous value", "Turns device on with previous value", "Turn on {0} with previous value")
self.AddNewAction(self, "Toggle", SimpleAction, 0x12, "Toggle", "Toggles between off and previous value", "Toggle {0} between off and previous value")
self.AddNewAction(self, "DimDown", RepeatAction, 0x14, "Dim down", "Dims down", "Dim down {0}")
self.AddNewAction(self, "DimUp", RepeatAction, 0x13, "Dim up", "Dims up", "Dim up {0}")
self.AddAction(Dim)
self.AddNewAction(self, "DimAlternating", RepeatAction, 0x15, "Alternating dim", "Dims up one level until maximum, then dim down", "Alternating dim {0}")
group = self.AddGroup(self.text.timedActionName, self.text.timedActionDescription)
self.AddNewAction(group, "OffTimer", TimerValueAction, 0x20, "Off in timer value", "Turns device off (dim to 0%) in timer value", "Turn off {0} in {1}")
self.AddNewAction(group, "OnTimer", TimerValueAction, 0x30, "On in timer value", "Turns device on (dim to 100%) in timer value", "Turn on {0} in {1}")
self.AddNewAction(group, "PreviousValueTimer", TimerValueAction, 0x31, "On with previous value in timer value", "Turns device on with previous value in timer value", "Turn on {0} with previous value in {1}")
self.AddNewAction(group, "ToggleTimer", TimerValueAction, 0x32, "Toggle in timer value", "Toggles between off and previous value in timer value", "Toggle {0} between off and previous value in {1}")
group.AddAction(DimTimer)
self.AddNewAction(group, "OffPreviousValueInternal", SimpleAction, 0x18, "Off for internal timer value, previous value afterwards", "Turns off (dim to 0%) device for internal timer value and return to previous value afterwards", "Turn off {0} for internal timer value and return to previous value afterwards")
self.AddNewAction(group, "OffPreviousValueTimer", TimerValueAction, 0x38, "Off for timer value, previous value afterwards", "Turns off (dim to 0%) device for timer value and return to previous value afterwards", "Turn off {0} for {1} and return to previous value afterwards")
self.AddNewAction(group, "OnOffInternal", SimpleAction, 0x19, "On (dim to 100%) for internal timer value, off afterwards", "Turns on (device dim to 100%) for internal timer value and turns it off afterwards", "Turn on {0} for internal timer value and turn off afterwards")
self.AddNewAction(group, "OnOffTimer", TimerValueAction, 0x39, "On (dim to 100%) for timer value, off afterwards", "Turns on (device dim to 100%) for timer value and turns it off afterwards", "Turn on {0} for {1} and turn off afterwards")
self.AddNewAction(group, "PreviousValueOffInternal", SimpleAction, 0x1a, "Previous value for internal timer value, off afterwards", "Turns on device with previous value for internal timer value and turns it off afterwards", "Turn on {0} with previous value for internal timer value and turn off afterwards")
self.AddNewAction(group, "PreviousValueOffTimer", TimerValueAction, 0x3a, "Previous value for timer value, off afterwards", "Turns on device with previous value for timer value and turns it off afterwards", "Turn on {0} with previous value for {1} and turn off afterwards")
self.AddNewAction(group, "OnPreviousStateInternal", SimpleAction, 0x1e, "On for internal timer value, previous state afterwards", "Turns on (dim to 100%) device for internal timer value and return to previous state afterwards", "Turn on {0} for internal timer value and return to previous state afterwards")
self.AddNewAction(group, "OnPreviousStateTimer", TimerValueAction, 0x3e, "On for timer value, previous state afterwards", "Turns on (dim to 100%) device for timer value and return to previous state afterwards", "Turn on {0} for {1} and return to previous state afterwards")
self.AddNewAction(group, "PreviousValuePreviousStateInternal", SimpleAction, 0x1f, "Previous value for internal timer value, previous state afterwards", "Turns on device with previous value for internal timer value and return to previous state afterwards", "Turn on {0} with previous value for internal timer value and return to previous state afterwards")
self.AddNewAction(group, "PreviousValuePreviousStateTimer", TimerValueAction, 0x3f, "Previous value for timer value, previous state afterwards", "Turns on device with previous value for timer value and return to previous state afterwards", "Turn on {0} with previous value for {1} and return to previous state afterwards")
self.AddNewAction(group, "DimUpOffTimer", RepeatTimerValueAction, 0x33, "Dim up and turn off after timer value", "Dims up and turns off after timer value", "Dim up {0} and turn off after {1}")
self.AddNewAction(group, "DimDownOffTimer", RepeatTimerValueAction, 0x34, "Dim down and turn off after timer value", "Dims down and turns off after timer value", "Dim down {0} and turn off after {1}")
self.AddNewAction(group, "DimAlternatingOffTimer", RepeatTimerValueAction, 0x35, "Alternating dim and turn off after timer value", "Dims up one level until maximum, then dim down and turns off after timer value", "Alternating dim {0} and turn off after {1}")
group = self.AddGroup("Programming", "Allows programming of FS20 devices. You should prefer timed actions and only use these for initial setup.")
self.AddNewAction(group, "ProgramTimer", SimpleAction, 0x16, "Start/stop programming of internal timer", "Starts respectively stop programming of the internal timer", "Start/stop programming of internal timer for {0}")
self.AddNewAction(group, "ProgramCode", SimpleAction, 0x17, "Program address", "Learn address. This is a dummy action which does nothing, but can be used for address learning procedure on some devices.", "Learn address {0}")
self.AddNewAction(group, "ProgramFactoryDefaults", SimpleAction, 0x1b, "Reset device to factory defaults", "Reset device to factory defaults", "Reset {0} to factory defaults")
self.AddNewAction(group, "ProgramInternalTimer", TimerValueAction, 0x36, "Program internal timer value", "Program internal timer value", "Program internal timer value for {0} to {1}")
self.AddNewAction(group, "ProgramDimUpRampTimer", TimerValueAction, 0x3c, "Program dim up ramp timer value", "Program dim up ramp timer value", "Program dim up ramp timer value for {0} to {1}")
self.AddNewAction(group, "ProgramDimDownRampTimer", TimerValueAction, 0x3d, "Program dim down ramp timer value", "Program dim down ramp timer value", "Program dim down ramp timer value for {0} to {1}")
def RawCallback(self, data):
if eg.debugLevel:
print "FS20PCS RawCallBack", binascii.hexlify(data)
if len(data) != 5 or data[0:3] != "\x02\x03\xA0":
self.PrintError("data must have a length of 5 and start with 02 03 A0")
return
errorId = ord(data[3:4])
if errorId == 0:
pass
#everything is fine
elif errorId == 1:
#Firmware version was requested
self.version = ord(data[4:5])
elif errorId == 2:
#Firmware version was requested
self.version = ord(data[4:5])
elif errorId == 3:
self.PrintError("Unknown command id")
elif errorId == 4:
self.PrintError("invalid command length")
elif errorId == 5:
self.PrintError("nothing to abort")
else:
self.PrintError("Unknown Error")
def PrintVersion(self):
#create the following Python command to show version number
#eg.plugins.FS20PCS.plugin.PrintVersion()
versionMajor = self.version / 16
versionMinor = self.version % 16
print "Firmware version %d.%d" % (versionMajor, versionMinor)
def StopCallback(self):
self.TriggerEvent("Stopped")
self.thread = None
def GetMyDevicePath(self):
path = GetDevicePath(
None,
VENDOR_ID,
PRODUCT_ID,
None,
0,
True,
0)
return path
def SendRawCommand(self, data, timeout = 0):
if not self.thread:
self.PrintError("Plug in is not running.")
return
dataLength = len(data)
if eg.debugLevel:
print "FS20PCS SendRawCommand", binascii.hexlify(data)
newData = data + ((11 - dataLength) * '\x00')
self.thread.Write(newData, timeout + 1000)#extra second to wait for response
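# Frame layout (illustrative, not part of the original plugin): reports are
# padded to 11 bytes, and the commands built below appear to follow
# 01 <len> <cmd> <a2> <a1> <a0> <func> [<time>] [<repeat>], so "on" (0x10)
# for address "1111 1111 - 1111" goes out as
# 01 06 f1 00 00 00 10 00 00 00 00.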
def Abort(self):
self.SendRawCommand("\x01\x01\xf3")
def RequestVersion(self):
data = '\x01\x01\xf0'
self.SendRawCommand(data)
def SetupHidThread(self, newDevicePath):
#create thread
thread = HIDThread(self.name, newDevicePath, self.name)
thread.SetStopCallback(self.StopCallback)
thread.SetRawCallback(self.RawCallback)
thread.start()
thread.WaitForInit()
self.thread = thread
self.RequestVersion()
def ReconnectDevice(self, event):
"""method to reconnect a disconnected device"""
if self.thread == None:
if not IsDeviceName(event.payload, VENDOR_ID, PRODUCT_ID):
return
#check if the right device was connected
#getting devicePath
newDevicePath = self.GetMyDevicePath()
if not newDevicePath:
#wrong device
return
self.SetupHidThread(newDevicePath)
def __start__(self):
#Bind plug in to RegisterDeviceNotification message
eg.Bind("System.DeviceAttached", self.ReconnectDevice)
newDevicePath = self.GetMyDevicePath()
if not newDevicePath:
#device not found
self.PrintError(Text.errorFind)
else:
self.SetupHidThread(newDevicePath)
def __stop__(self):
if self.thread:
self.thread.AbortThread()
#unbind from RegisterDeviceNotification message
eg.Unbind("System.DeviceAttached", self.ReconnectDevice)
def GetAddressBytes(address):
x, a0 = divmod(address, 256)
a2, a1 = divmod(x, 256)
return chr(a2) + chr(a1) + chr(a0)
def GetStringFromAddress(address, formatted = False):
valueStr = ""
for i in range(11, -1, -1):
x = (address >> i*2) & 0x03
valueStr += str(x + 1)
if formatted:
if i == 4:
valueStr += " - "
if i == 8:
valueStr += " "
return valueStr
def GetAddressFromString(addressString):
address = 0
for i in range(12):
address <<= 2
address += int(addressString[i]) - 1
return address
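# Worked example (illustrative, not part of the original plugin): FS20
# addresses are 12 base-4 keypad digits (1-4), packed two bits per digit
# into a 24-bit integer, most significant digit first.
#
# >>> GetAddressFromString("111111111111")   # all-1 digits -> all-zero bits
# 0
# >>> GetStringFromAddress(0, formatted=True)
# '1111 1111 - 1111'
# >>> [ord(c) for c in GetAddressBytes(GetAddressFromString("111111111112"))]
# [0, 0, 1]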
def GetTimeCodeByIndex(index):
if index < 16:
return index
return index + ((index / 8) - 1) * 8
def GetTimeCodeIndex(timeCode):
if timeCode < 16:
return timeCode
return timeCode - (timeCode / 16) * 8
def GetTimeValue(timeCode):
return (2**(timeCode / 16)) * 0.25 * (timeCode % 16)
def FormatTimeValue(timeValue):
if timeValue >= 3600:
hours = math.floor(timeValue / 3600)
minutes = math.floor((timeValue - (hours * 3600)) / 60)
seconds = timeValue - (hours * 3600) - minutes * 60
return "%0d h %00d m %00d s" % (hours, minutes, seconds)
elif timeValue >= 60:
minutes = math.floor(timeValue / 60)
seconds = timeValue - minutes * 60
return "%00d m %00d s" % (minutes, seconds)
else:
return "%0.02f sec" % timeValue
class ActionBase(eg.ActionBase):
defaultAddress = 0 #GetAddressFromString("123412342222")
funcCode = None
name = None
description = None
def AddAddressControl(self, panel, address):
if address is None:
address = self.defaultAddress
maskedCtrl = masked.TextCtrl(
parent=panel,
mask="#### #### - ####",
defaultValue="1111 1111 - 1111",
excludeChars="056789",
formatcodes="F",
validRequired=False,
)
maskedCtrl.SetValue(GetStringFromAddress(address))
panel.AddLine(self.plugin.text.address, maskedCtrl)
return maskedCtrl
def AddTimerControl(self, panel, timeCode):
def TimerCallback(value):
timeCodeForValue = GetTimeCodeByIndex(value)
return FormatTimeValue(GetTimeValue(timeCodeForValue))
timerCtrl = eg.Slider(
panel,
value=GetTimeCodeIndex(timeCode),
min=0,
max=111,
minLabel=FormatTimeValue(0),
maxLabel=FormatTimeValue(15360),
style = wx.SL_TOP,
size=(300,-1),
levelCallback=TimerCallback
)
timerCtrl.SetMinSize((300, -1))
panel.AddLine(self.plugin.text.timerValue, timerCtrl)
return timerCtrl
def AddRepeatControl(self, panel, repeatCount):
repeatCtrl = eg.Slider(
panel,
value=repeatCount,
min=1,
max=255,
minLabel="1",
maxLabel="255",
style = wx.SL_TOP,
size=(300,-1),
)
repeatCtrl.SetMinSize((300, -1))
panel.AddLine(self.plugin.text.repeat, repeatCtrl)
return repeatCtrl
def AddLevelControl(self, panel, level):
def LevelCallback(value):
return "%.02f%%" % (value * 100.00 / 16)
levelCtrl = eg.Slider(
panel,
value=level,
min=0,
max=16,
minLabel="0.00%",
maxLabel="100.00%",
style = wx.SL_AUTOTICKS|wx.SL_TOP,
size=(300,-1),
levelCallback=LevelCallback
)
levelCtrl.SetMinSize((300, -1))
panel.AddLine(self.plugin.text.level, levelCtrl)
return levelCtrl
class SimpleAction(ActionBase):
"""Base class for all action that only take an address as input
"""
def __call__(self, address):
self.plugin.SendRawCommand("\x01\x06\xf1" + GetAddressBytes(address) + chr(self.funcCode))
def GetLabel(self, address):
return self.text.labelFormat.format(GetStringFromAddress(address, True))
def Configure(self, address = None):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(address)
class RepeatAction(ActionBase):
"""Base class for all action that take an address and repeat Count
"""
def __call__(self, address, repeatCount):
self.plugin.SendRawCommand("\x01\x07\xf2" + GetAddressBytes(address) + chr(self.funcCode) + "\x00" + chr(repeatCount), repeatCount * TIME_OUT)
def GetLabel(self, address, repeatCount):
label = self.text.labelFormat.format(GetStringFromAddress(address, True))
if repeatCount > 1:
label = self.plugin.text.repeatSuffix.format(label, repeatCount)
return label
def Configure(self, address = None, repeatCount = 1):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
repeatCtrl = self.AddRepeatControl(panel, repeatCount)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(address, repeatCtrl.GetValue())
class RepeatTimerValueAction(ActionBase):
"""Base class for all action that take an address, timer value and repeat Count
"""
def __call__(self, address, timeCode, repeatCount):
self.plugin.SendRawCommand("\x01\x07\xf2" + GetAddressBytes(address) + chr(self.funcCode) + chr(timeCode) + chr(repeatCount), repeatCount * TIME_OUT)
def GetLabel(self, address, timeCode, repeatCount):
label = self.text.labelFormat.format(GetStringFromAddress(address, True), FormatTimeValue(GetTimeValue(timeCode)))
if repeatCount > 1:
label = self.plugin.text.repeatSuffix.format(label, repeatCount)
return label
def Configure(self, address = None, timeCode = 0, repeatCount = 1):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
timerCtrl = self.AddTimerControl(panel, timeCode)
repeatCtrl = self.AddRepeatControl(panel, repeatCount)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(address, GetTimeCodeByIndex(timerCtrl.GetValue()), repeatCtrl.GetValue())
class TimerValueAction(ActionBase):
"""Base class for all action that take an address and timer value
"""
def __call__(self, address, timeCode):
self.plugin.SendRawCommand("\x01\x06\xf1" + GetAddressBytes(address) + chr(self.funcCode) + chr(timeCode))
def GetLabel(self, address, timeCode):
return self.text.labelFormat.format(GetStringFromAddress(address, True), FormatTimeValue(GetTimeValue(timeCode)))
def Configure(self, address = None, timeCode = 0):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
timerCtrl = self.AddTimerControl(panel, timeCode)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(address, GetTimeCodeByIndex(timerCtrl.GetValue()))
class Dim(ActionBase):
class Text:
labelFormat = "Set dim-level to {1:.02f}% for {0}"
name = "Dim"
description = "Sets dim level immediately"
text = Text
def __call__(self, address, level):
self.plugin.SendRawCommand("\x01\x06\xf1" + GetAddressBytes(address) + chr(level))
def GetLabel(self, address, level):
return self.text.labelFormat.format(GetStringFromAddress(address, True), (level * 100.00 / 16))
def Configure(self, address = None, level = 8):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
levelCtrl = self.AddLevelControl(panel, level)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(address, levelCtrl.GetValue())
class DimTimer(ActionBase):
class Text:
labelFormat = "Set dim-level to {1:.02f}% for {0} in {2}"
name = "Dim in timer value"
description = "Sets the dim level in timer value"
text = Text
def __call__(self, address, level, timeCode):
self.plugin.SendRawCommand("\x01\x06\xf1" + GetAddressBytes(address) + chr(level + 32) + chr(timeCode))
def GetLabel(self, address, level, timeCode):
return self.text.labelFormat.format(GetStringFromAddress(address, True), (level * 100.00 / 16), FormatTimeValue(GetTimeValue(timeCode)))
def Configure(self, address = None, level = 8, timeCode = 0):
panel = eg.ConfigPanel()
maskedCtrl = self.AddAddressControl(panel, address)
levelCtrl = self.AddLevelControl(panel, level)
timerCtrl = self.AddTimerControl(panel, timeCode)
while panel.Affirmed():
address = GetAddressFromString(maskedCtrl.GetPlainValue())
ActionBase.defaultAddress = address
panel.SetResult(
address,
levelCtrl.GetValue(),
GetTimeCodeByIndex(timerCtrl.GetValue()))
| gpl-2.0 | -4,539,095,718,050,212,000 | 43.917355 | 364 | 0.657498 | false |
neilLasrado/erpnext | erpnext/crm/doctype/investor/investor.py | 1 | 2929 |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
from erpnext.accounts.party import validate_party_accounts, get_dashboard_info, get_timeline_data # keep this
class Investor(Document):
def onload(self):
"""Load address and contacts in `__onload`"""
load_address_and_contact(self)
def after_insert(self):
self.update_lead_status()
def on_update(self):
if self.flags.old_lead != self.party_name:
self.update_lead_status()
if self.investor_from == "Lead" and self.party_name:
self.create_lead_address()
self.create_lead_contact()
if self.investor_from == "Opportunity" and self.party_name:
self.create_opportunity_address()
self.create_opportunity_contact()
def update_lead_status(self):
'''If the Investor was created from a Lead, update the lead status to "Investor"'''
if self.investor_from == "Lead" and self.party_name:
frappe.db.set_value('Lead', self.party_name, 'status', 'Investor', update_modified=False)
def create_lead_address(self):
# assign lead address to investor (if already not set)
address_names = frappe.get_all('Dynamic Link', filters={
"parenttype": "Address",
"link_doctype": "Lead",
"link_name": self.party_name
}, fields=["parent as name"])
for address_name in address_names:
address = frappe.get_doc('Address', address_name.get('name'))
if not address.has_link('Investor', self.name):
address.append('links', dict(link_doctype='Investor', link_name=self.name))
address.save()
def create_lead_contact(self):
# assign lead contact to investor (if already not set)
contact_names = frappe.get_all('Dynamic Link', filters={
"parenttype": "Contact",
"link_doctype": "Lead",
"link_name": self.party_name
}, fields=["parent as name"])
for contact_name in contact_names:
contact = frappe.get_doc('Contact', contact_name.get('name'))
if not contact.has_link('Investor', self.name):
contact.append('links', dict(link_doctype='Investor', link_name=self.name))
contact.save()
def create_opportunity_address(self):
customer_address = frappe.db.get_value("Opportunity", self.party_name, "customer_address")
if customer_address:
address = frappe.get_doc('Address', customer_address)
if not address.has_link('Investor', self.name):
address.append('links', dict(link_doctype='Investor', link_name=self.name))
address.save()
def create_opportunity_contact(self):
contact_person = frappe.db.get_value("Opportunity", self.party_name, "contact_person")
if contact_person:
contact = frappe.get_doc('Contact', contact_person)
if not contact.has_link('Investor', self.name):
contact.append('links', dict(link_doctype='Investor', link_name=self.name))
contact.save()
| gpl-3.0 | -8,412,990,094,023,700,000 | 36.551282 | 110 | 0.71014 | false |
ThinkingBridge/platform_external_chromium_org | media/tools/constrained_network_server/cns.py | 168 | 17314 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Constrained Network Server. Serves files with supplied network constraints.
The CNS exposes a web based API allowing network constraints to be imposed on
file serving.
TODO(dalecurtis): Add some more docs here.
"""
import logging
from logging import handlers
import mimetypes
import optparse
import os
import signal
import sys
import threading
import time
import urllib
import urllib2
import traffic_control
try:
import cherrypy
except ImportError:
print ('CNS requires CherryPy v3 or higher to be installed. Please install\n'
'and try again. On Linux: sudo apt-get install python-cherrypy3\n')
sys.exit(1)
# Add webm file types to mimetypes map since cherrypy's default type is text.
mimetypes.types_map['.webm'] = 'video/webm'
# Default logging is ERROR. Use --verbose to enable DEBUG logging.
_DEFAULT_LOG_LEVEL = logging.ERROR
# Default port to serve the CNS on.
_DEFAULT_SERVING_PORT = 9000
# Default port range for constrained use.
_DEFAULT_CNS_PORT_RANGE = (50000, 51000)
# Default number of seconds before a port can be torn down.
_DEFAULT_PORT_EXPIRY_TIME_SECS = 5 * 60
class PortAllocator(object):
"""Dynamically allocates/deallocates ports with a given set of constraints."""
def __init__(self, port_range, expiry_time_secs=5 * 60):
"""Sets up initial state for the Port Allocator.
Args:
port_range: Range of ports available for allocation.
expiry_time_secs: Amount of time in seconds before constrained ports are
cleaned up.
"""
self._port_range = port_range
self._expiry_time_secs = expiry_time_secs
# Keeps track of ports we've used, the creation key, and the last request
# time for the port so they can be cached and cleaned up later.
self._ports = {}
# Locks port creation and cleanup. TODO(dalecurtis): If performance becomes
# an issue a per-port based lock system can be used instead.
self._port_lock = threading.RLock()
def Get(self, key, new_port=False, **kwargs):
"""Sets up a constrained port using the requested parameters.
Requests for the same key and constraints will result in a cached port being
returned if possible, subject to new_port.
Args:
key: Used to cache ports with the given constraints.
new_port: Whether to create a new port or use an existing one if possible.
**kwargs: Constraints to pass into traffic control.
Returns:
None if no port can be setup or the port number of the constrained port.
"""
with self._port_lock:
# Check port key cache to see if this port is already setup. Update the
# cache time and return the port if so. Performance isn't a concern here,
# so just iterate over ports dict for simplicity.
full_key = (key,) + tuple(kwargs.values())
if not new_port:
for port, status in self._ports.iteritems():
if full_key == status['key']:
self._ports[port]['last_update'] = time.time()
return port
# Cleanup ports on new port requests. Do it after the cache check though
# so we don't erase and then setup the same port.
if self._expiry_time_secs > 0:
self.Cleanup(all_ports=False)
# Performance isn't really an issue here, so just iterate over the port
# range to find an unused port. If no port is found, None is returned.
for port in xrange(self._port_range[0], self._port_range[1]):
if port in self._ports:
continue
if self._SetupPort(port, **kwargs):
kwargs['port'] = port
self._ports[port] = {'last_update': time.time(), 'key': full_key,
'config': kwargs}
return port
def _SetupPort(self, port, **kwargs):
"""Setup network constraints on port using the requested parameters.
Args:
port: The port number to setup network constraints on.
**kwargs: Network constraints to set up on the port.
Returns:
True if setting the network constraints on the port was successful, false
otherwise.
"""
kwargs['port'] = port
try:
cherrypy.log('Setting up port %d' % port)
traffic_control.CreateConstrainedPort(kwargs)
return True
except traffic_control.TrafficControlError as e:
cherrypy.log('Error: %s\nOutput: %s' % (e.msg, e.error))
return False
def Cleanup(self, all_ports, request_ip=None):
"""Cleans up expired ports, or if all_ports=True, all allocated ports.
By default, ports which haven't been used for self._expiry_time_secs are
torn down. If all_ports=True then they are torn down regardless.
Args:
all_ports: Should all ports be torn down regardless of expiration?
request_ip: Tear down ports matching the IP address regardless of expiration.
"""
with self._port_lock:
now = time.time()
# Use .items() instead of .iteritems() so we can delete keys w/o error.
for port, status in self._ports.items():
expired = now - status['last_update'] > self._expiry_time_secs
matching_ip = request_ip and status['key'][0].startswith(request_ip)
if all_ports or expired or matching_ip:
cherrypy.log('Cleaning up port %d' % port)
self._DeletePort(port)
del self._ports[port]
def _DeletePort(self, port):
"""Deletes network constraints on port.
Args:
port: The port number associated with the network constraints.
"""
try:
traffic_control.DeleteConstrainedPort(self._ports[port]['config'])
except traffic_control.TrafficControlError as e:
cherrypy.log('Error: %s\nOutput: %s' % (e.msg, e.error))
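# Illustrative sketch (not part of the original file): how the server below
# drives PortAllocator. The constraint values are hypothetical, and setting
# them up requires the privileges that the traffic_control module needs.
#
# allocator = PortAllocator((50000, 51000), expiry_time_secs=300)
# port = allocator.Get('10.0.0.2', server_port=9000, interface='eth0',
#                      bandwidth=256, latency=100, loss=5,
#                      new_port=False, file='test.webm')
# # ... serve traffic through `port` ...
# allocator.Cleanup(all_ports=True)   # tear everything down on shutdown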
class ConstrainedNetworkServer(object):
"""A CherryPy-based HTTP server for serving files with network constraints."""
def __init__(self, options, port_allocator):
"""Sets up initial state for the CNS.
Args:
options: optparse based class returned by ParseArgs()
port_allocator: A port allocator instance.
"""
self._options = options
self._port_allocator = port_allocator
@cherrypy.expose
def Cleanup(self):
"""Cleans up all the ports allocated using the request IP address.
When requesting a constrained port, the cherrypy.request.remote.ip is used
as a key for that port (in addition to other request parameters). Such
ports created for the same IP address are removed.
"""
cherrypy.log('Cleaning up ports allocated by %s.' %
cherrypy.request.remote.ip)
self._port_allocator.Cleanup(all_ports=False,
request_ip=cherrypy.request.remote.ip)
@cherrypy.expose
def ServeConstrained(self, f=None, bandwidth=None, latency=None, loss=None,
new_port=False, no_cache=False, **kwargs):
"""Serves the requested file with the requested constraints.
Subsequent requests for the same constraints from the same IP will share the
previously created port unless new_port equals True. If no constraints
are provided the file is served as is.
Args:
f: path relative to http root of file to serve.
bandwidth: maximum allowed bandwidth for the provided port (integer
in kbit/s).
latency: time to add to each packet (integer in ms).
loss: percentage of packets to drop (integer, 0-100).
new_port: whether to use a new port for this request or not.
no_cache: Set response's cache-control to no-cache.
"""
if no_cache:
response = cherrypy.response
response.headers['Pragma'] = 'no-cache'
response.headers['Cache-Control'] = 'no-cache'
# CherryPy is a bit wonky at detecting parameters, so just make them all
# optional and validate them ourselves.
if not f:
raise cherrypy.HTTPError(400, 'Invalid request. File must be specified.')
# Check existence early to prevent wasted constraint setup.
self._CheckRequestedFileExist(f)
# If there are no constraints, just serve the file.
if bandwidth is None and latency is None and loss is None:
return self._ServeFile(f)
constrained_port = self._GetConstrainedPort(
f, bandwidth=bandwidth, latency=latency, loss=loss, new_port=new_port,
**kwargs)
# Build constrained URL using the constrained port and original URL
# parameters except the network constraints (bandwidth, latency, and loss).
constrained_url = self._GetServerURL(f, constrained_port,
no_cache=no_cache, **kwargs)
# Redirect request to the constrained port.
cherrypy.log('Redirect to %s' % constrained_url)
cherrypy.lib.cptools.redirect(constrained_url, internal=False)
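# Example request (illustrative, not part of the original file): with the
# server on its default port,
#   http://<host>:9000/ServeConstrained?f=test.webm&bandwidth=256&latency=100&loss=5
# redirects to the same file on a freshly constrained port, while
#   http://<host>:9000/ServeConstrained?f=test.webm
# serves it without any shaping.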
def _CheckRequestedFileExist(self, f):
"""Checks if the requested file exists, raises HTTPError otherwise."""
if self._options.local_server_port:
self._CheckFileExistOnLocalServer(f)
else:
self._CheckFileExistOnServer(f)
def _CheckFileExistOnServer(self, f):
"""Checks if requested file f exists to be served by this server."""
# Sanitize and check the path to prevent www-root escapes.
sanitized_path = os.path.abspath(os.path.join(self._options.www_root, f))
if not sanitized_path.startswith(self._options.www_root):
raise cherrypy.HTTPError(403, 'Invalid file requested.')
if not os.path.exists(sanitized_path):
raise cherrypy.HTTPError(404, 'File not found.')
def _CheckFileExistOnLocalServer(self, f):
"""Checks if requested file exists on local server hosting files."""
test_url = self._GetServerURL(f, self._options.local_server_port)
try:
cherrypy.log('Check file exist using URL: %s' % test_url)
return urllib2.urlopen(test_url) is not None
except Exception:
raise cherrypy.HTTPError(404, 'File not found on local server.')
def _ServeFile(self, f):
"""Serves the file as an http response."""
if self._options.local_server_port:
redirect_url = self._GetServerURL(f, self._options.local_server_port)
cherrypy.log('Redirect to %s' % redirect_url)
cherrypy.lib.cptools.redirect(redirect_url, internal=False)
else:
sanitized_path = os.path.abspath(os.path.join(self._options.www_root, f))
return cherrypy.lib.static.serve_file(sanitized_path)
def _GetServerURL(self, f, port, **kwargs):
"""Returns a URL for local server to serve the file on given port.
Args:
f: file name to serve on local server. Relative to www_root.
port: Local server port (it can be a configured constrained port).
      kwargs: extra parameters passed through in the URL.
"""
url = '%s?f=%s&' % (cherrypy.url(), f)
if self._options.local_server_port:
url = '%s/%s?' % (
cherrypy.url().replace('ServeConstrained', self._options.www_root), f)
url = url.replace(':%d' % self._options.port, ':%d' % port)
extra_args = urllib.urlencode(kwargs)
if extra_args:
url += extra_args
return url
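  # A worked example of the rewriting above (values hypothetical): with
  # cherrypy.url() == 'http://host:9000/ServeConstrained', f='a.webm', no
  # local server and constrained port 16000, the result is
  # 'http://host:16000/ServeConstrained?f=a.webm&' (the trailing '&' is left
  # for extra_args).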
def _GetConstrainedPort(self, f=None, bandwidth=None, latency=None, loss=None,
new_port=False, **kwargs):
"""Creates or gets a port with specified network constraints.
See ServeConstrained() for more details.
"""
    # Validate constraint inputs; each must parse as an integer in its legal range.
bandwidth = self._ParseIntParameter(
bandwidth, 'Invalid bandwidth constraint.', lambda x: x > 0)
latency = self._ParseIntParameter(
latency, 'Invalid latency constraint.', lambda x: x >= 0)
loss = self._ParseIntParameter(
loss, 'Invalid loss constraint.', lambda x: x <= 100 and x >= 0)
redirect_port = self._options.port
if self._options.local_server_port:
redirect_port = self._options.local_server_port
start_time = time.time()
# Allocate a port using the given constraints. If a port with the requested
    # key and kwargs already exists, reuse that port.
constrained_port = self._port_allocator.Get(
cherrypy.request.remote.ip, server_port=redirect_port,
interface=self._options.interface, bandwidth=bandwidth, latency=latency,
loss=loss, new_port=new_port, file=f, **kwargs)
cherrypy.log('Time to set up port %d = %.3fsec.' %
(constrained_port, time.time() - start_time))
if not constrained_port:
raise cherrypy.HTTPError(503, 'Service unavailable. Out of ports.')
return constrained_port
def _ParseIntParameter(self, param, msg, check):
"""Returns integer value of param and verifies it satisfies the check.
Args:
      param: Parameter value to check.
      msg: Error message used if the HTTPError is raised.
      check: Predicate the integer value must satisfy.
Returns:
None if param is None, integer value of param otherwise.
Raises:
      cherrypy.HTTPError if param cannot be converted to an integer or if it does
not satisfy the check.
"""
if param:
try:
int_value = int(param)
if check(int_value):
return int_value
except:
pass
raise cherrypy.HTTPError(400, msg)
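  # Example behaviour of the validator above (hypothetical values):
  #   self._ParseIntParameter('42', 'bad', lambda x: x > 0)   # -> 42
  #   self._ParseIntParameter(None, 'bad', lambda x: x > 0)   # -> None
  #   self._ParseIntParameter('-1', 'bad', lambda x: x > 0)   # -> HTTPError 400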
def ParseArgs():
"""Define and parse the command-line arguments."""
parser = optparse.OptionParser()
parser.add_option('--expiry-time', type='int',
default=_DEFAULT_PORT_EXPIRY_TIME_SECS,
help=('Number of seconds before constrained ports expire '
'and are cleaned up. 0=Disabled. Default: %default'))
parser.add_option('--port', type='int', default=_DEFAULT_SERVING_PORT,
help='Port to serve the API on. Default: %default')
parser.add_option('--port-range', default=_DEFAULT_CNS_PORT_RANGE,
help=('Range of ports for constrained serving. Specify as '
'a comma separated value pair. Default: %default'))
parser.add_option('--interface', default='eth0',
help=('Interface to setup constraints on. Use lo for a '
'local client. Default: %default'))
parser.add_option('--socket-timeout', type='int',
default=cherrypy.server.socket_timeout,
help=('Number of seconds before a socket connection times '
'out. Default: %default'))
parser.add_option('--threads', type='int',
default=cherrypy._cpserver.Server.thread_pool,
help=('Number of threads in the thread pool. Default: '
'%default'))
parser.add_option('--www-root', default='',
help=('Directory root to serve files from. If --local-'
'server-port is used, the path is appended to the '
'redirected URL of local server. Defaults to the '
'current directory (if --local-server-port is not '
'used): %s' % os.getcwd()))
parser.add_option('--local-server-port', type='int',
help=('Optional local server port to host files.'))
parser.add_option('-v', '--verbose', action='store_true', default=False,
help='Turn on verbose output.')
options = parser.parse_args()[0]
# Convert port range into the desired tuple format.
try:
if isinstance(options.port_range, str):
options.port_range = [int(port) for port in options.port_range.split(',')]
except ValueError:
parser.error('Invalid port range specified.')
if options.expiry_time < 0:
parser.error('Invalid expiry time specified.')
# Convert the path to an absolute to remove any . or ..
if not options.local_server_port:
if not options.www_root:
options.www_root = os.getcwd()
options.www_root = os.path.abspath(options.www_root)
_SetLogger(options.verbose)
return options
def _SetLogger(verbose):
file_handler = handlers.RotatingFileHandler('cns.log', 'a', 10000000, 10)
file_handler.setFormatter(logging.Formatter('[%(threadName)s] %(message)s'))
log_level = _DEFAULT_LOG_LEVEL
if verbose:
log_level = logging.DEBUG
file_handler.setLevel(log_level)
cherrypy.log.error_log.addHandler(file_handler)
cherrypy.log.access_log.addHandler(file_handler)
def Main():
"""Configure and start the ConstrainedNetworkServer."""
options = ParseArgs()
try:
traffic_control.CheckRequirements()
except traffic_control.TrafficControlError as e:
cherrypy.log(e.msg)
return
cherrypy.config.update({'server.socket_host': '::',
'server.socket_port': options.port})
if options.threads:
cherrypy.config.update({'server.thread_pool': options.threads})
if options.socket_timeout:
cherrypy.config.update({'server.socket_timeout': options.socket_timeout})
# Setup port allocator here so we can call cleanup on failures/exit.
pa = PortAllocator(options.port_range, expiry_time_secs=options.expiry_time)
try:
cherrypy.quickstart(ConstrainedNetworkServer(options, pa))
finally:
# Disable Ctrl-C handler to prevent interruption of cleanup.
signal.signal(signal.SIGINT, lambda signal, frame: None)
pa.Cleanup(all_ports=True)
if __name__ == '__main__':
Main()
| bsd-3-clause | 1,863,596,965,475,157,500 | 36.969298 | 80 | 0.662065 | false |
doismellburning/django | tests/signals/tests.py | 311 | 10273 | from __future__ import unicode_literals
from django.db import models
from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from django.utils import six
from .models import Author, Book, Car, Person
class BaseSignalTest(TestCase):
def setUp(self):
# Save up the number of connected signals so that we can check at the
# end that all the signals we register get properly unregistered (#9989)
self.pre_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
def tearDown(self):
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
self.assertEqual(self.pre_signals, post_signals)
class SignalTests(BaseSignalTest):
def test_model_pre_init_and_post_init(self):
data = []
def pre_init_callback(sender, args, **kwargs):
data.append(kwargs['kwargs'])
signals.pre_init.connect(pre_init_callback)
def post_init_callback(sender, instance, **kwargs):
data.append(instance)
signals.post_init.connect(post_init_callback)
p1 = Person(first_name="John", last_name="Doe")
self.assertEqual(data, [{}, p1])
def test_save_signals(self):
data = []
def pre_save_handler(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("raw", False))
)
def post_save_handler(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("created"), kwargs.get("raw", False))
)
signals.pre_save.connect(pre_save_handler, weak=False)
signals.post_save.connect(post_save_handler, weak=False)
try:
p1 = Person.objects.create(first_name="John", last_name="Smith")
self.assertEqual(data, [
(p1, False),
(p1, True, False),
])
data[:] = []
p1.first_name = "Tom"
p1.save()
self.assertEqual(data, [
(p1, False),
(p1, False, False),
])
data[:] = []
# Calling an internal method purely so that we can trigger a "raw" save.
p1.save_base(raw=True)
self.assertEqual(data, [
(p1, True),
(p1, False, True),
])
data[:] = []
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
self.assertEqual(data, [
(p2, False),
(p2, True, False),
])
data[:] = []
p2.id = 99998
p2.save()
self.assertEqual(data, [
(p2, False),
(p2, True, False),
])
finally:
signals.pre_save.disconnect(pre_save_handler)
signals.post_save.disconnect(post_save_handler)
def test_delete_signals(self):
data = []
def pre_delete_handler(signal, sender, instance, **kwargs):
data.append(
(instance, instance.id is None)
)
# #8285: signals can be any callable
class PostDeleteHandler(object):
def __init__(self, data):
self.data = data
def __call__(self, signal, sender, instance, **kwargs):
self.data.append(
(instance, instance.id is None)
)
post_delete_handler = PostDeleteHandler(data)
signals.pre_delete.connect(pre_delete_handler, weak=False)
signals.post_delete.connect(post_delete_handler, weak=False)
try:
p1 = Person.objects.create(first_name="John", last_name="Smith")
p1.delete()
self.assertEqual(data, [
(p1, False),
(p1, False),
])
data[:] = []
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
p2.id = 99998
p2.save()
p2.delete()
self.assertEqual(data, [
(p2, False),
(p2, False)
])
data[:] = []
self.assertQuerysetEqual(
Person.objects.all(), [
"James Jones",
],
six.text_type
)
finally:
signals.pre_delete.disconnect(pre_delete_handler)
signals.post_delete.disconnect(post_delete_handler)
def test_decorators(self):
data = []
@receiver(signals.pre_save, weak=False)
def decorated_handler(signal, sender, instance, **kwargs):
data.append(instance)
@receiver(signals.pre_save, sender=Car, weak=False)
def decorated_handler_with_sender_arg(signal, sender, instance, **kwargs):
data.append(instance)
try:
c1 = Car.objects.create(make="Volkswagon", model="Passat")
self.assertEqual(data, [c1, c1])
finally:
signals.pre_save.disconnect(decorated_handler)
signals.pre_save.disconnect(decorated_handler_with_sender_arg, sender=Car)
def test_save_and_delete_signals_with_m2m(self):
data = []
def pre_save_handler(signal, sender, instance, **kwargs):
data.append('pre_save signal, %s' % instance)
if kwargs.get('raw'):
data.append('Is raw')
def post_save_handler(signal, sender, instance, **kwargs):
data.append('post_save signal, %s' % instance)
if 'created' in kwargs:
if kwargs['created']:
data.append('Is created')
else:
data.append('Is updated')
if kwargs.get('raw'):
data.append('Is raw')
def pre_delete_handler(signal, sender, instance, **kwargs):
data.append('pre_delete signal, %s' % instance)
data.append('instance.id is not None: %s' % (instance.id is not None))
def post_delete_handler(signal, sender, instance, **kwargs):
data.append('post_delete signal, %s' % instance)
data.append('instance.id is not None: %s' % (instance.id is not None))
signals.pre_save.connect(pre_save_handler, weak=False)
signals.post_save.connect(post_save_handler, weak=False)
signals.pre_delete.connect(pre_delete_handler, weak=False)
signals.post_delete.connect(post_delete_handler, weak=False)
try:
a1 = Author.objects.create(name='Neal Stephenson')
self.assertEqual(data, [
"pre_save signal, Neal Stephenson",
"post_save signal, Neal Stephenson",
"Is created"
])
data[:] = []
b1 = Book.objects.create(name='Snow Crash')
self.assertEqual(data, [
"pre_save signal, Snow Crash",
"post_save signal, Snow Crash",
"Is created"
])
data[:] = []
# Assigning and removing to/from m2m shouldn't generate an m2m signal.
b1.authors = [a1]
self.assertEqual(data, [])
b1.authors = []
self.assertEqual(data, [])
finally:
signals.pre_save.disconnect(pre_save_handler)
signals.post_save.disconnect(post_save_handler)
signals.pre_delete.disconnect(pre_delete_handler)
signals.post_delete.disconnect(post_delete_handler)
def test_disconnect_in_dispatch(self):
"""
        Test that signals that disconnect when being called don't mess up
        future dispatching.
"""
class Handler(object):
def __init__(self, param):
self.param = param
self._run = False
def __call__(self, signal, sender, **kwargs):
self._run = True
signal.disconnect(receiver=self, sender=sender)
a, b = Handler(1), Handler(2)
signals.post_save.connect(a, sender=Person, weak=False)
signals.post_save.connect(b, sender=Person, weak=False)
Person.objects.create(first_name='John', last_name='Smith')
self.assertTrue(a._run)
self.assertTrue(b._run)
self.assertEqual(signals.post_save.receivers, [])
class LazyModelRefTest(BaseSignalTest):
def setUp(self):
super(LazyModelRefTest, self).setUp()
self.received = []
def receiver(self, **kwargs):
self.received.append(kwargs)
def test_invalid_sender_model_name(self):
with self.assertRaisesMessage(ValueError,
"Specified sender must either be a model or a "
"model name of the 'app_label.ModelName' form."):
signals.post_init.connect(self.receiver, sender='invalid')
def test_already_loaded_model(self):
signals.post_init.connect(
self.receiver, sender='signals.Book', weak=False
)
try:
instance = Book()
self.assertEqual(self.received, [{
'signal': signals.post_init,
'sender': Book,
'instance': instance
}])
finally:
signals.post_init.disconnect(self.receiver, sender=Book)
def test_not_loaded_model(self):
signals.post_init.connect(
self.receiver, sender='signals.Created', weak=False
)
try:
class Created(models.Model):
pass
instance = Created()
self.assertEqual(self.received, [{
'signal': signals.post_init, 'sender': Created, 'instance': instance
}])
finally:
signals.post_init.disconnect(self.receiver, sender=Created)
| bsd-3-clause | -610,495,658,226,570,600 | 33.016556 | 86 | 0.53957 | false |
a-doumoulakis/tensorflow | tensorflow/contrib/opt/python/training/moving_average_optimizer.py | 84 | 5839 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Moving average optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import moving_averages
from tensorflow.python.training import optimizer
from tensorflow.python.training import saver
class MovingAverageOptimizer(optimizer.Optimizer):
"""Optimizer that computes a moving average of the variables.
Empirically it has been found that using the moving average of the trained
parameters of a deep network is better than using its trained parameters
directly. This optimizer allows you to compute this moving average and swap
the variables at save time so that any code outside of the training loop will
use by default the averaged values instead of the original ones.
Example of usage:
```python
  # Encapsulate your favorite optimizer (here the momentum one)
  # inside the MovingAverageOptimizer.
  opt = tf.train.MomentumOptimizer(learning_rate, FLAGS.momentum)
  opt = tf.contrib.opt.MovingAverageOptimizer(opt)
  # Then create your model and all its variables.
  model = build_model()
  # Add the training op that optimizes using opt.
  # This needs to be called before swapping_saver().
  opt.minimize(cost, var_list=var_list)
  # Then create your saver like this:
  saver = opt.swapping_saver()
  # Pass it to your training loop.
slim.learning.train(
model,
...
saver=saver)
```
Note that for evaluation, the normal saver should be used instead of
swapping_saver().
"""
def __init__(self, opt, average_decay=0.9999, num_updates=None,
sequential_update=True):
"""Construct a new MovingAverageOptimizer.
Args:
opt: A tf.Optimizer that will be used to compute and apply gradients.
average_decay: Float. Decay to use to maintain the moving averages
of trained variables.
See tf.train.ExponentialMovingAverage for details.
num_updates: Optional count of number of updates applied to variables.
See tf.train.ExponentialMovingAverage for details.
sequential_update: Bool. If False, will compute the moving average at the
same time as the model is updated, potentially doing
benign data races.
If True, will update the moving average after gradient
updates.
"""
self._optimizer = opt
self._ema = moving_averages.ExponentialMovingAverage(
average_decay, num_updates=num_updates)
self._variable_map = None
self._sequential_update = sequential_update
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
train_op = self._optimizer.apply_gradients(
grads_and_vars, global_step=global_step, name=name)
var_list = [x[1] for x in grads_and_vars if x[0] is not None]
self._variable_map = {}
if self._sequential_update:
with ops.control_dependencies([train_op]):
ma_op = self._ema.apply(var_list)
else:
ma_op = self._ema.apply(var_list)
for v in var_list:
v_avg = self._ema.average(v)
self._variable_map[v.op.name] = v_avg
self._variable_map[v_avg.op.name] = v
return control_flow_ops.group(train_op, ma_op, name="train_with_avg")
def swapping_saver(self, var_list=None, name='swapping_saver', **kwargs):
"""Create a saver swapping moving averages and variables.
You should use this saver during training. It will save the moving averages
of the trained parameters under the original parameter names. For
evaluations or inference you should use a regular saver and it will
automatically use the moving averages for the trained variable.
You must call this function after all variables have been created and after
you have called Optimizer.minimize().
Args:
var_list: List of variables to save, as per `Saver()`.
If set to None, will save all the variables that have been
created before this call.
name: The name of the saver.
**kwargs: Keyword arguments of `Saver()`.
Returns:
A `tf.train.Saver` object.
Raises:
RuntimeError: If apply_gradients or minimize has not been called before.
"""
if self._variable_map is None:
raise RuntimeError('Must call apply_gradients or minimize before '
'creating the swapping_saver')
if var_list is None:
var_list = variables.global_variables()
if not isinstance(var_list, dict):
var_list = saver.BaseSaverBuilder.OpListToDict(var_list)
# Now swap variables and moving averages
swapped_var_list = {}
for k, v in six.iteritems(var_list):
v_swap = self._variable_map.get(v.op.name, None)
if v_swap:
swapped_var_list[k] = v_swap
else:
swapped_var_list[k] = v
# Build the swapping saver.
return saver.Saver(swapped_var_list, name=name, **kwargs)
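  # A minimal end-to-end sketch (assumes `cost` and `var_list` already exist;
  # hyperparameters are illustrative):
  #
  #   base_opt = tf.train.MomentumOptimizer(0.01, 0.9)
  #   opt = MovingAverageOptimizer(base_opt, average_decay=0.999)
  #   train_op = opt.minimize(cost, var_list=var_list)  # before swapping_saver()
  #   saver = opt.swapping_saver()  # saves averages under the original names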
| apache-2.0 | -6,233,560,233,608,460,000 | 38.721088 | 80 | 0.679911 | false |
nlu90/heron | heron/instance/tests/python/utils/topology_context_impl_unittest.py | 5 | 2275 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-docstring
import unittest
from heron.instance.src.python.utils.topology import TopologyContextImpl
import heron.instance.tests.python.utils.mock_generator as mock_generator
import heron.instance.tests.python.mock_protobuf as mock_protobuf
class TopologyContextImplTest(unittest.TestCase):
def setUp(self):
self.context = TopologyContextImpl(
config={},
topology=mock_protobuf.get_mock_topology(),
task_to_component={},
my_task_id="task_id",
metrics_collector=None,
topo_pex_path="path.to.pex")
def test_task_hook(self):
task_hook = mock_generator.MockTaskHook()
self.assertFalse(len(self.context.task_hooks) > 0)
self.context.add_task_hook(task_hook)
self.assertTrue(len(self.context.task_hooks) > 0)
self.context.invoke_hook_prepare()
self.context.invoke_hook_emit(None, None, None)
self.assertTrue(task_hook.emit_called)
self.context.invoke_hook_spout_ack(None, 0.1)
self.assertTrue(task_hook.spout_ack_called)
self.context.invoke_hook_spout_fail(None, 0.1)
self.assertTrue(task_hook.spout_fail_called)
self.context.invoke_hook_bolt_execute(None, 0.1)
self.assertTrue(task_hook.bolt_exec_called)
self.context.invoke_hook_bolt_ack(None, 0.1)
self.assertTrue(task_hook.bolt_ack_called)
self.context.invoke_hook_bolt_fail(None, 0.1)
self.assertTrue(task_hook.bolt_fail_called)
| apache-2.0 | -8,836,224,120,763,604,000 | 34.546875 | 73 | 0.734066 | false |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/reportlab-3.2.0/tests/test_platypus_wrapping.py | 14 | 3840 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
"""Tests for wrapping of long words and URLs in paragraphs
"""
__version__='''$Id: test_platypus_indents.py 3660 2010-02-08 18:17:33Z damian $'''
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation
setOutDir(__name__)
import sys, os, random
from operator import truth
import unittest
from reportlab.pdfbase.pdfmetrics import stringWidth
from reportlab.platypus.paraparser import ParaParser
from reportlab.platypus.flowables import Flowable
from reportlab.lib.colors import Color
from reportlab.lib.units import cm
from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY
from reportlab.lib.utils import _className
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.frames import Frame
from reportlab.platypus.doctemplate \
import PageTemplate, BaseDocTemplate, Indenter, FrameBreak, NextPageTemplate
from reportlab.platypus import tableofcontents
from reportlab.platypus.tableofcontents import TableOfContents
from reportlab.platypus.tables import TableStyle, Table
from reportlab.platypus.paragraph import *
from reportlab.platypus.paragraph import _getFragWords
from reportlab.platypus.flowables import Spacer
def myMainPageFrame(canvas, doc):
"The page frame used for all PDF documents."
canvas.saveState()
canvas.rect(2.5*cm, 2.5*cm, 15*cm, 25*cm)
canvas.setFont('Times-Roman', 12)
pageNumber = canvas.getPageNumber()
canvas.drawString(10*cm, cm, str(pageNumber))
canvas.restoreState()
class MyDocTemplate(BaseDocTemplate):
_invalidInitArgs = ('pageTemplates',)
def __init__(self, filename, **kw):
frame1 = Frame(2.5*cm, 2.5*cm, 15*cm, 25*cm, id='F1')
self.allowSplitting = 0
BaseDocTemplate.__init__(self, filename, **kw)
template1 = PageTemplate('normal', [frame1], myMainPageFrame)
frame2 = Frame(2.5*cm, 16*cm, 15*cm, 10*cm, id='F2', showBoundary=1)
frame3 = Frame(2.5*cm, 2.5*cm, 15*cm, 10*cm, id='F3', showBoundary=1)
template2 = PageTemplate('updown', [frame2, frame3])
self.addPageTemplates([template1, template2])
class WrappingTestCase(unittest.TestCase):
"Test wrapping of long urls"
def test0(self):
"This makes one long multi-page paragraph."
# Build story.
story = []
styleSheet = getSampleStyleSheet()
h1 = styleSheet['Heading1']
h1.spaceBefore = 18
bt = styleSheet['BodyText']
bt.spaceBefore = 6
story.append(Paragraph('Test of paragraph wrapping',h1))
story.append(Spacer(18,18))
txt = "Normally we wrap paragraphs by looking for spaces between the words. However, with long technical command references and URLs, sometimes this gives ugly results. We attempt to split really long words on certain tokens: slashes, dots etc."
story.append(Paragraph(txt,bt))
story.append(Paragraph('This is an attempt to break long URLs sanely. Here is a file name: <font face="Courier">C:\\Windows\\System32\\Drivers\\etc\\hosts</font>. ', bt))
story.append(Paragraph('This paragraph has a URL (basically, a word) too long to fit on one line, so it just overflows. http://some-really-long-site.somewhere-verbose.com/webthingies/framework/xc4987236hgsdlkafh/foo?format=dingbats&content=rubbish. Ideally, we would wrap it in the middle.', bt))
doc = MyDocTemplate(outputfile('test_platypus_wrapping.pdf'))
doc.multiBuild(story)
#noruntests
def makeSuite():
return makeSuiteForClasses(WrappingTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
| mit | -4,638,309,570,354,209,000 | 36.647059 | 308 | 0.719792 | false |
ivanlmj/PyCaptive | app/modules/checksys.py | 1 | 2085 |
import socket
import subprocess as sp
import sys
from app import app
class Components():
def __init__(self):
self._binaries = (app.config['CHECKSYS_DICT']["IPTABLES"], app.config['CHECKSYS_DICT']["CONNTRACK"])
self._services = None
if app.config['TEST_MODE']:
self._services = {
"mongodb":(
app.config['CHECKSYS_DICT']["MONGODB_IP"], app.config['CHECKSYS_DICT']["MONGODB_PORT"]
)
}
else:
self._services = {
"nginx_redir_gunicorn":(
app.config['CHECKSYS_DICT']["NGINX_IP"], app.config['CHECKSYS_DICT']["NGINX_REDIR"]
),
"nginx_gunicorn":(
app.config['CHECKSYS_DICT']["NGINX_IP"], app.config['CHECKSYS_DICT']["NGINX_GUNICORN"]
),
"mongodb":(
                    app.config['CHECKSYS_DICT']["MONGODB_IP"], app.config['CHECKSYS_DICT']["MONGODB_PORT"]
)
}
def binaries(self):
""" Check existence of binaries. """
results = dict()
for b in self._binaries:
result = sp.call(["which", b], stderr=sp.DEVNULL, stdout=sp.DEVNULL)
result_bin = b.split('/')[-1]
if result == 0:
results[result_bin] = 0
else:
results[result_bin] = 1
return results
def services(self):
""" Check status of network services. """
# TODO: must ensure that this is working....
results = dict()
for k,v in self._services.items():
s = socket.socket()
try:
s.connect((v[0], int(v[1])))
results[k] = 0
except Exception:
results[k] = 1
finally:
s.close()
return results
| gpl-3.0 | 8,282,093,403,946,885,000 | 35.578947 | 119 | 0.430695 | false |
emersonsoftware/ansiblefork | lib/ansible/utils/module_docs_fragments/openstack.py | 26 | 4023 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard openstack documentation fragment
DOCUMENTATION = '''
options:
cloud:
description:
- Named cloud to operate against. Provides default values for I(auth) and
I(auth_type). This parameter is not needed if I(auth) is provided or if
OpenStack OS_* environment variables are present.
required: false
auth:
description:
- Dictionary containing auth information as needed by the cloud's auth
plugin strategy. For the default I(password) plugin, this would contain
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
requires. This parameter is not needed if a named cloud is provided or
OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
- Name of the auth plugin to use. If the cloud uses something other than
password authentication, the name of the plugin should be indicated here
and the contents of the I(auth) parameter should be updated accordingly.
required: false
default: password
region_name:
description:
- Name of the region.
required: false
availability_zone:
description:
- Name of the availability zone.
required: false
wait:
description:
- Should ansible wait until the requested resource is complete.
required: false
default: "yes"
choices: ["yes", "no"]
timeout:
description:
- How long should ansible wait for the requested resource.
required: false
default: 180
api_timeout:
description:
- How long should the socket layer wait before timing out for API calls.
If this is omitted, nothing will be passed to the requests library.
required: false
default: None
validate_certs:
description:
- Whether or not SSL API requests should be verified.
required: false
default: True
aliases: ['verify']
cacert:
description:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction.
required: false
default: None
key:
description:
- A path to a client key to use as part of the SSL transaction.
required: false
default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
choices: [public, internal, admin]
required: false
default: public
requirements:
- python >= 2.7
- shade
notes:
- The standard OpenStack environment variables, such as C(OS_USERNAME)
may be used instead of providing explicit values.
- Auth information is driven by os-client-config, which means that values
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
plays. More information can be found at
U(http://docs.openstack.org/developer/os-client-config)
'''
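# A module reusing this fragment would typically declare, in its own
# DOCUMENTATION string (module name hypothetical):
#
#   module: os_example
#   extends_documentation_fragment: openstack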
| gpl-3.0 | 9,215,688,418,562,492,000 | 34.919643 | 80 | 0.705195 | false |
grupozeety/CDerpnext | erpnext/accounts/report/profit_and_loss_statement/profit_and_loss_statement.py | 7 | 1698 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)
def execute(filters=None):
period_list = get_period_list(filters.fiscal_year, filters.periodicity)
income = get_data(filters.company, "Income", "Credit", period_list,
accumulated_values=filters.accumulated_values, ignore_closing_entries=True)
expense = get_data(filters.company, "Expense", "Debit", period_list,
accumulated_values=filters.accumulated_values, ignore_closing_entries=True)
net_profit_loss = get_net_profit_loss(income, expense, period_list, filters.company)
data = []
data.extend(income or [])
data.extend(expense or [])
if net_profit_loss:
data.append(net_profit_loss)
columns = get_columns(filters.periodicity, period_list, filters.accumulated_values, filters.company)
return columns, data
def get_net_profit_loss(income, expense, period_list, company):
if income and expense:
total = 0
net_profit_loss = {
"account_name": "'" + _("Net Profit / Loss") + "'",
"account": None,
"warn_if_negative": True,
"currency": frappe.db.get_value("Company", company, "default_currency")
}
has_value = False
for period in period_list:
net_profit_loss[period.key] = flt(income[-2][period.key] - expense[-2][period.key], 3)
if net_profit_loss[period.key]:
has_value=True
total += flt(net_profit_loss[period.key])
net_profit_loss["total"] = total
if has_value:
return net_profit_loss
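# Worked example of the row arithmetic above (hypothetical period key 'jan'):
# with income[-2] == {'jan': 100.0} and expense[-2] == {'jan': 60.0}, the net
# row gets {'jan': 40.0} and "total" == 40.0.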
| agpl-3.0 | 7,337,644,426,609,977,000 | 31.653846 | 101 | 0.716726 | false |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/LegacyWindowsExploits/Exploits/EAFU 2.2.0/EAFU_SSL.py | 1 | 1272 | import re, socket, string, sys
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.exit(2)
target_address = (sys.argv[1])
target_port = int(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((target_address, target_port))
ssl_sock = socket.ssl(s)
# print the cert info
#print repr(ssl_sock.server())
#print repr(ssl_sock.issuer())
    # Send a simple HTTP request -- use httplib in actual code.
ssl_sock.write("""GET / HTTP/1.1\r\nHost:\r\n\r\n""")
# Read a chunk of data. Will not necessarily
# read all the data returned by the server.
data = ssl_sock.read()
# what did we get back?
# print data
# parse the reply for the version number
# Server: WDaemon/9.5.1
if re.search('Server: WDaemon/\d\d?\.\d\.\d', data):
m = re.search('Server: WDaemon\/(\d\d?\.\d\.\d)', data)
print "\n\n\nWorldClient version is: " + m.group(1)
elif re.search('Server: Microsoft-IIS\/(\d\d?\.\d).*MDaemon\/WorldClient v(\d\d?\.\d\.\d)', data):
n = re.search('Server: Microsoft-IIS\/(\d\d?\.\d).*MDaemon\/WorldClient v(\d\d?\.\d\.\d)', data)
print "\n\n\nWorldClient version and IIS version is: " + n.group(2) + n.group(1)
# Note that you need to close the underlying socket, not the SSL object.
del ssl_sock
s.close()
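    # Example invocation (hypothetical target):
    #   python EAFU_SSL.py 192.0.2.10 443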
| unlicense | 2,908,487,244,535,040,500 | 28.604651 | 99 | 0.639937 | false |
rd37/horizon | horizon/conf/__init__.py | 77 | 2063 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from django.utils.functional import empty # noqa
from django.utils.functional import LazyObject # noqa
class LazySettings(LazyObject):
def _setup(self, name=None):
from django.conf import settings
from horizon.conf.default import HORIZON_CONFIG as DEFAULT_CONFIG # noqa
HORIZON_CONFIG = copy.copy(DEFAULT_CONFIG)
HORIZON_CONFIG.update(settings.HORIZON_CONFIG)
# Ensure we always have our exception configuration...
for exc_category in ['unauthorized', 'not_found', 'recoverable']:
if exc_category not in HORIZON_CONFIG['exceptions']:
default_exc_config = DEFAULT_CONFIG['exceptions'][exc_category]
HORIZON_CONFIG['exceptions'][exc_category] = default_exc_config
# Ensure our password validator always exists...
if 'regex' not in HORIZON_CONFIG['password_validator']:
default_pw_regex = DEFAULT_CONFIG['password_validator']['regex']
HORIZON_CONFIG['password_validator']['regex'] = default_pw_regex
if 'help_text' not in HORIZON_CONFIG['password_validator']:
default_pw_help = DEFAULT_CONFIG['password_validator']['help_text']
HORIZON_CONFIG['password_validator']['help_text'] = default_pw_help
self._wrapped = HORIZON_CONFIG
def __getitem__(self, name, fallback=None):
if self._wrapped is empty:
self._setup(name)
return self._wrapped.get(name, fallback)
HORIZON_CONFIG = LazySettings()
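# A minimal usage sketch (both keys below exist in the default config):
#
#   from horizon.conf import HORIZON_CONFIG
#   exceptions_cfg = HORIZON_CONFIG['exceptions']
#   pw_regex = HORIZON_CONFIG['password_validator']['regex']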
| apache-2.0 | -6,889,847,102,809,219,000 | 42.893617 | 81 | 0.688318 | false |
hassanabidpk/django | tests/gis_tests/test_geoip.py | 73 | 5275 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import unittest
import warnings
from unittest import skipUnless
from django.conf import settings
from django.contrib.gis.geoip import HAS_GEOIP
from django.contrib.gis.geos import HAS_GEOS, GEOSGeometry
from django.test import ignore_warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
if HAS_GEOIP:
from django.contrib.gis.geoip import GeoIP, GeoIPException
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_PATH setting should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat').
@skipUnless(HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_PATH setting.")
@ignore_warnings(category=RemovedInDjango20Warning)
class GeoIPTest(unittest.TestCase):
addr = '128.249.1.1'
fqdn = 'tmc.edu'
def test01_init(self):
"Testing GeoIP initialization."
g1 = GeoIP() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP(path, 0) # Passing in data path explicitly.
g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertTrue(g._country)
self.assertTrue(g._city)
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLiteCity.dat')
cntry = os.path.join(path, 'GeoIP.dat')
g4 = GeoIP(city, country='')
self.assertIsNone(g4._country)
g5 = GeoIP(cntry, city='')
self.assertIsNone(g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
self.assertRaises(GeoIPException, GeoIP, cache=bad)
if isinstance(bad, six.string_types):
e = GeoIPException
else:
e = TypeError
self.assertRaises(e, GeoIP, bad, 0)
def test02_bad_query(self):
"Testing GeoIP query parameter checking."
cntry_g = GeoIP(city='<foo>')
# No city database available, these calls should fail.
self.assertRaises(GeoIPException, cntry_g.city, 'google.com')
self.assertRaises(GeoIPException, cntry_g.coords, 'yahoo.com')
# Non-string query should raise TypeError
self.assertRaises(TypeError, cntry_g.country_code, 17)
self.assertRaises(TypeError, cntry_g.country_name, GeoIP)
def test03_country(self):
"Testing GeoIP country querying methods."
g = GeoIP(city='<foo>')
for query in (self.fqdn, self.addr):
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query), 'Failed for func %s and query %s' % (func, query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query), 'Failed for func %s and query %s' % (func, query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
@skipUnless(HAS_GEOS, "Geos is required")
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')
for query in (self.fqdn, self.addr):
# Country queries should still work.
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
# City information dictionary.
d = g.city(query)
self.assertEqual('USA', d['country_code3'])
self.assertEqual('Houston', d['city'])
self.assertEqual('TX', d['region'])
self.assertEqual(713, d['area_code'])
geom = g.geos(query)
self.assertIsInstance(geom, GEOSGeometry)
lon, lat = (-95.4010, 29.7079)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 4)
self.assertAlmostEqual(lat, tup[1], 4)
def test05_unicode_response(self):
"Testing that GeoIP strings are properly encoded, see #16553."
g = GeoIP()
d = g.city("duesseldorf.de")
self.assertEqual('Düsseldorf', d['city'])
d = g.country('200.26.205.1')
# Some databases have only unaccented countries
self.assertIn(d['country_name'], ('Curaçao', 'Curacao'))
def test_deprecation_warning(self):
with warnings.catch_warnings(record=True) as warns:
warnings.simplefilter('always')
GeoIP()
self.assertEqual(len(warns), 1)
msg = str(warns[0].message)
self.assertIn('django.contrib.gis.geoip is deprecated', msg)
| bsd-3-clause | 7,794,955,658,760,493,000 | 39.561538 | 113 | 0.616158 | false |
amnet04/ALECMAPREADER1 | funcionesCV_recurrentes.py | 1 | 4438 | import numpy as np
import pandas
import cv2
def cargar_imagen(archivo):
'''
    Loads the image into two matrices, one grayscale and one color, and
    returns a dictionary with both versions.
'''
imagen = {}
imagen['gris'] = cv2.imread(archivo,0)
imagen['color'] = cv2.imread(archivo)
return(imagen)
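# A quick usage sketch (file path hypothetical):
#   imagen = cargar_imagen('maps/sheet1.png')
#   gris, color = imagen['gris'], imagen['color']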
def dilatar_imagen(img, umbral_blanco, umbral_negro, dim_kernel, iteraciones):
ret,thresh = cv2.threshold(img, umbral_blanco,umbral_negro,cv2.THRESH_BINARY_INV)
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, dim_kernel)
dilatada= cv2.dilate(thresh,kernel,iterations = iteraciones)
return(dilatada)
def erosionar_imagen(img, umbral_blanco, umbral_negro, dim_kernel, iteraciones):
ret,thresh = cv2.threshold(img, umbral_blanco,umbral_negro,cv2.THRESH_BINARY_INV)
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, dim_kernel)
erosionada = cv2.erode(thresh,kernel,iterations = iteraciones)
return(erosionada)
def dibujar_rectangulos(img,x1,y1,x2,y2,color,ancho_bordes,archivo=''):
cv2.rectangle(img,(x1,y1),(x2,y2),(color),ancho_bordes)
# if archivo !='':
# cv2.imwrite(archivo,img)
def cortar_imagen(img,x1,x2,y1,y2):
corte = img[y1:y2,x1:x2]
img_cortada = {}
img_cortada['im'] = corte
img_cortada['x1'] = x1
img_cortada['y1'] = y1
img_cortada['x2'] = x2
img_cortada['y2'] = y2
return(img_cortada)
def bw_otsu(img, umbral_blanco,limite,blur=0,blur_ori =0):
'''
    blur is the blur kernel shape as a tuple, e.g. (5, 5).
    blur_ori is an integer passed as the Gaussian sigma. If no values are
    given, the blur is skipped.
'''
if blur == (0,0):
blureada = img
else:
blureada = cv2.GaussianBlur(img,blur,blur_ori)
ret,th = cv2.threshold(blureada,umbral_blanco,limite,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
return (th)
def bw_adapta(img,limite,tam,sh):
th = cv2.adaptiveThreshold(img,limite,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,tam,sh)
return (th)
def ver_imagen(img, title='quick look'):
cv2.imshow(title, img)
cv2.waitKey()
cv2.destroyAllWindows()
def detectar(template, imagen, max_var_thresh):
'''
    Detects whether the template has matches in the map image and returns
    the top-left and bottom-right coordinates of the match and the matched
    region (ROI) within the overall map image.
'''
imagen_gris = cv2.cvtColor(imagen, cv2.COLOR_RGB2GRAY)
imagen_bw = bw_adapta(imagen_gris, 255, 71, 30)
h, w = template.shape
coincidencia = cv2.matchTemplate(template, imagen_bw, cv2.TM_CCOEFF)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(coincidencia)
x1 = max_loc[0]
x2 = max_loc[0] + w
y1 = max_loc[1]
y2 = max_loc[1] + h
if max_val < max_var_thresh:
#cv2.imwrite('Pruebas/tast.jpg',imagen[y1:y2,x1:x2])
return(None, max_val)
else:
#print (max_val)
sup_izq = (x1,y1)
inf_der = (x2,y2)
roi = imagen[y1:y2,x1:x2]
return(sup_izq, inf_der, roi)
def detectar_recursivo(template, imagen, thresh):
imagen_gris = cv2.cvtColor(imagen, cv2.COLOR_RGB2GRAY)
imagen_bw = bw_adapta(imagen_gris, 255, 71, 30)
h, w = template.shape
res = cv2.matchTemplate(imagen_bw,template,cv2.TM_CCOEFF_NORMED)
loc = np.where(res>=thresh)
puntos = []
for punto in zip(*loc[::-1]):
puntos.append(punto)
return (puntos, h, w)
def detectar_area_contornos(imagen,
umbral_blanco,
umbral_negro,
dim_kernel,
iteraciones,
w, h):
if dim_kernel != (0,0):
imagen_dilatada = dilatar_imagen(imagen,
umbral_blanco,
umbral_negro,
dim_kernel,
iteraciones)
else:
imagen_dilatada = imagen
imagen, contours, hierarchy = cv2.findContours(imagen_dilatada,
cv2.RETR_TREE,
cv2.CHAIN_APPROX_SIMPLE)
areas = []
for contour in contours:
[x,y,wc,hc] = cv2.boundingRect(contour)
x1 = x
y1 = y
x2 = x+wc
y2 = y+hc
if (wc > w) and (hc > h):
areas.append((x1, y1 , x2, y2))
return (areas)
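# A minimal pipeline sketch tying the helpers together (all values are
# illustrative):
#   img = cargar_imagen('map.png')
#   areas = detectar_area_contornos(img['gris'], 127, 255, (5, 5), 2, 30, 15)
#   for (x1, y1, x2, y2) in areas:
#       dibujar_rectangulos(img['color'], x1, y1, x2, y2, (0, 0, 255), 2)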
| mit | -509,802,030,842,585,000 | 34.206349 | 98 | 0.591073 | false |
takluyver/readthedocs.org | readthedocs/restapi/views/footer_views.py | 6 | 4195 | from django.shortcuts import get_object_or_404
from django.template import Context, loader as template_loader
from django.conf import settings
from django.core.context_processors import csrf
from rest_framework import decorators, permissions
from rest_framework.renderers import JSONPRenderer, JSONRenderer, BrowsableAPIRenderer
from rest_framework.response import Response
from bookmarks.models import Bookmark
from builds.models import Version
from projects.models import Project
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer, JSONPRenderer, BrowsableAPIRenderer))
def footer_html(request):
project_slug = request.GET.get('project', None)
version_slug = request.GET.get('version', None)
page_slug = request.GET.get('page', None)
theme = request.GET.get('theme', False)
docroot = request.GET.get('docroot', '')
subproject = request.GET.get('subproject', False)
source_suffix = request.GET.get('source_suffix', '.rst')
new_theme = (theme == "sphinx_rtd_theme")
using_theme = (theme == "default")
project = get_object_or_404(Project, slug=project_slug)
version = get_object_or_404(Version.objects.public(request.user, project=project, only_active=False), slug=version_slug)
main_project = project.main_language_project or project
if page_slug and page_slug != "index":
if main_project.documentation_type == "sphinx_htmldir" or main_project.documentation_type == "mkdocs":
path = page_slug + "/"
elif main_project.documentation_type == "sphinx_singlehtml":
path = "index.html#document-" + page_slug
else:
path = page_slug + ".html"
else:
path = ""
host = request.get_host()
if settings.PRODUCTION_DOMAIN in host and request.user.is_authenticated():
show_bookmarks = True
try:
bookmark = Bookmark.objects.get(
user=request.user,
project=project,
version=version,
page=page_slug,
)
except (Bookmark.DoesNotExist, Bookmark.MultipleObjectsReturned, Exception):
bookmark = None
else:
show_bookmarks = False
bookmark = None
if version.type == 'tag' and version.project.has_pdf(version.slug):
print_url = 'https://keminglabs.com/print-the-docs/quote?project={project}&version={version}'.format(
project=project.slug,
version=version.slug,
)
else:
print_url = None
show_promo = True
# User is a gold user, no promos for them!
if request.user.is_authenticated():
if request.user.gold.count() or request.user.goldonce.count():
show_promo = False
# Explicit promo disabling
if project.slug in getattr(settings, 'DISABLE_PROMO_PROJECTS', []):
show_promo = False
# A GoldUser has mapped this project
if project.gold_owners.count():
show_promo = False
context = Context({
'show_bookmarks': show_bookmarks,
'bookmark': bookmark,
'project': project,
'path': path,
'downloads': version.get_downloads(pretty=True),
'current_version': version.slug,
'versions': project.ordered_active_versions(),
'main_project': main_project,
'translations': main_project.translations.all(),
'current_language': project.language,
'using_theme': using_theme,
'new_theme': new_theme,
'settings': settings,
'subproject': subproject,
'print_url': print_url,
'github_edit_url': version.get_github_url(docroot, page_slug, source_suffix, 'edit'),
'github_view_url': version.get_github_url(docroot, page_slug, source_suffix, 'view'),
'bitbucket_url': version.get_bitbucket_url(docroot, page_slug, source_suffix),
})
context.update(csrf(request))
html = template_loader.get_template('restapi/footer.html').render(context)
return Response({
'html': html,
'version_active': version.active,
'version_supported': version.supported,
'promo': show_promo,
})
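# A hedged request sketch (route and values illustrative; the exact URL is
# defined by the project's URL conf):
#   GET /api/v2/footer_html/?project=pip&version=latest&page=index&docroot=/docs/
# returns JSON like:
#   {"html": "...", "version_active": true, "version_supported": true, "promo": true}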
| mit | 5,000,815,453,148,028,000 | 38.205607 | 124 | 0.651728 | false |
pizzapanther/HoverMom | hovermom/django/db/transaction.py | 77 | 20601 | """
This module implements a transaction manager that can be used to define
transaction handling in a request or view function. It is used by transaction
control middleware and decorators.
The transaction manager can be in managed or in auto state. Auto state means the
system is using a commit-on-save strategy (actually it's more like
commit-on-change). As soon as the .save() or .delete() (or related) methods are
called, a commit is made.
Managed transactions don't do those commits, but will need some kind of manual
or implicit commits or rollbacks.
"""
import warnings
from functools import wraps
from django.db import (
connections, DEFAULT_DB_ALIAS,
DatabaseError, Error, ProgrammingError)
from django.utils.decorators import available_attrs
class TransactionManagementError(ProgrammingError):
"""
This exception is thrown when transaction management is used improperly.
"""
pass
################
# Private APIs #
################
def get_connection(using=None):
"""
Get a database connection by name, or the default database connection
if no name is provided.
"""
if using is None:
using = DEFAULT_DB_ALIAS
return connections[using]
###########################
# Deprecated private APIs #
###########################
def abort(using=None):
"""
Roll back any ongoing transactions and clean the transaction management
state of the connection.
This method is to be used only in cases where using balanced
leave_transaction_management() calls isn't possible. For example after a
request has finished, the transaction state isn't known, yet the connection
must be cleaned up for the next request.
"""
get_connection(using).abort()
def enter_transaction_management(managed=True, using=None, forced=False):
"""
Enters transaction management for a running thread. It must be balanced with
the appropriate leave_transaction_management call, since the actual state is
managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
"""
get_connection(using).enter_transaction_management(managed, forced)
def leave_transaction_management(using=None):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
get_connection(using).leave_transaction_management()
def is_dirty(using=None):
"""
Returns True if the current transaction requires a commit for changes to
happen.
"""
return get_connection(using).is_dirty()
def set_dirty(using=None):
"""
    Sets a dirty flag for the current thread and code streak. This can be used
    in a managed block of code to decide whether there are open changes
    waiting for commit.
"""
get_connection(using).set_dirty()
def set_clean(using=None):
"""
    Resets a dirty flag for the current thread and code streak. This can be
    used in a managed block of code to decide whether a commit or rollback
    should happen.
"""
get_connection(using).set_clean()
def is_managed(using=None):
warnings.warn("'is_managed' is deprecated.",
PendingDeprecationWarning, stacklevel=2)
def managed(flag=True, using=None):
warnings.warn("'managed' no longer serves a purpose.",
PendingDeprecationWarning, stacklevel=2)
def commit_unless_managed(using=None):
warnings.warn("'commit_unless_managed' is now a no-op.",
PendingDeprecationWarning, stacklevel=2)
def rollback_unless_managed(using=None):
warnings.warn("'rollback_unless_managed' is now a no-op.",
PendingDeprecationWarning, stacklevel=2)
###############
# Public APIs #
###############
def get_autocommit(using=None):
"""
Get the autocommit status of the connection.
"""
return get_connection(using).get_autocommit()
def set_autocommit(autocommit, using=None):
"""
Set the autocommit status of the connection.
"""
return get_connection(using).set_autocommit(autocommit)
def commit(using=None):
"""
Commits a transaction and resets the dirty flag.
"""
get_connection(using).commit()
def rollback(using=None):
"""
Rolls back a transaction and resets the dirty flag.
"""
get_connection(using).rollback()
def savepoint(using=None):
"""
Creates a savepoint (if supported and required by the backend) inside the
current transaction. Returns an identifier for the savepoint that will be
used for the subsequent rollback or commit.
"""
return get_connection(using).savepoint()
def savepoint_rollback(sid, using=None):
"""
Rolls back the most recent savepoint (if one exists). Does nothing if
savepoints are not supported.
"""
get_connection(using).savepoint_rollback(sid)
def savepoint_commit(sid, using=None):
"""
Commits the most recent savepoint (if one exists). Does nothing if
savepoints are not supported.
"""
get_connection(using).savepoint_commit(sid)
def clean_savepoints(using=None):
"""
Resets the counter used to generate unique savepoint ids in this thread.
"""
get_connection(using).clean_savepoints()
def get_rollback(using=None):
"""
Gets the "needs rollback" flag -- for *advanced use* only.
"""
return get_connection(using).get_rollback()
def set_rollback(rollback, using=None):
"""
Sets or unsets the "needs rollback" flag -- for *advanced use* only.
When `rollback` is `True`, it triggers a rollback when exiting the
innermost enclosing atomic block that has `savepoint=True` (that's the
default). Use this to force a rollback without raising an exception.
When `rollback` is `False`, it prevents such a rollback. Use this only
after rolling back to a known-good state! Otherwise, you break the atomic
block and data corruption may occur.
"""
return get_connection(using).set_rollback(rollback)
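# A minimal sketch of forcing a rollback without raising (helper names are
# hypothetical):
#
#   from django.db import transaction
#
#   with transaction.atomic():
#       do_writes()                         # hypothetical
#       if validation_failed():             # hypothetical
#           transaction.set_rollback(True)  # exit rolls the block back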
#################################
# Decorators / context managers #
#################################
class Atomic(object):
"""
This class guarantees the atomic execution of a given block.
An instance can be used either as a decorator or as a context manager.
When it's used as a decorator, __call__ wraps the execution of the
decorated function in the instance itself, used as a context manager.
When it's used as a context manager, __enter__ creates a transaction or a
savepoint, depending on whether a transaction is already in progress, and
__exit__ commits the transaction or releases the savepoint on normal exit,
and rolls back the transaction or to the savepoint on exceptions.
It's possible to disable the creation of savepoints if the goal is to
ensure that some code runs within a transaction without creating overhead.
A stack of savepoints identifiers is maintained as an attribute of the
connection. None denotes the absence of a savepoint.
This allows reentrancy even if the same AtomicWrapper is reused. For
example, it's possible to define `oa = @atomic('other')` and use `@oa` or
`with oa:` multiple times.
Since database connections are thread-local, this is thread-safe.
"""
def __init__(self, using, savepoint):
self.using = using
self.savepoint = savepoint
def __enter__(self):
connection = get_connection(self.using)
if not connection.in_atomic_block:
# Reset state when entering an outermost atomic block.
connection.commit_on_exit = True
connection.needs_rollback = False
if not connection.get_autocommit():
# Some database adapters (namely sqlite3) don't handle
# transactions and savepoints properly when autocommit is off.
# Turning autocommit back on isn't an option; it would trigger
# a premature commit. Give up if that happens.
if connection.features.autocommits_when_autocommit_is_off:
raise TransactionManagementError(
"Your database backend doesn't behave properly when "
"autocommit is off. Turn it on before using 'atomic'.")
# When entering an atomic block with autocommit turned off,
# Django should only use savepoints and shouldn't commit.
# This requires at least a savepoint for the outermost block.
if not self.savepoint:
raise TransactionManagementError(
"The outermost 'atomic' block cannot use "
"savepoint = False when autocommit is off.")
# Pretend we're already in an atomic block to bypass the code
# that disables autocommit to enter a transaction, and make a
# note to deal with this case in __exit__.
connection.in_atomic_block = True
connection.commit_on_exit = False
if connection.in_atomic_block:
# We're already in a transaction; create a savepoint, unless we
# were told not to or we're already waiting for a rollback. The
# second condition avoids creating useless savepoints and prevents
# overwriting needs_rollback until the rollback is performed.
if self.savepoint and not connection.needs_rollback:
sid = connection.savepoint()
connection.savepoint_ids.append(sid)
else:
connection.savepoint_ids.append(None)
else:
# We aren't in a transaction yet; create one.
# The usual way to start a transaction is to turn autocommit off.
# However, some database adapters (namely sqlite3) don't handle
# transactions and savepoints properly when autocommit is off.
# In such cases, start an explicit transaction instead, which has
# the side-effect of disabling autocommit.
if connection.features.autocommits_when_autocommit_is_off:
connection._start_transaction_under_autocommit()
connection.autocommit = False
else:
connection.set_autocommit(False)
connection.in_atomic_block = True
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
if connection.savepoint_ids:
sid = connection.savepoint_ids.pop()
else:
# Prematurely unset this flag to allow using commit or rollback.
connection.in_atomic_block = False
try:
if connection.closed_in_transaction:
# The database will perform a rollback by itself.
# Wait until we exit the outermost block.
pass
elif exc_type is None and not connection.needs_rollback:
if connection.in_atomic_block:
# Release savepoint if there is one
if sid is not None:
try:
connection.savepoint_commit(sid)
except DatabaseError:
try:
connection.savepoint_rollback(sid)
except Error:
# If rolling back to a savepoint fails, mark for
# rollback at a higher level and avoid shadowing
# the original exception.
connection.needs_rollback = True
raise
else:
# Commit transaction
try:
connection.commit()
except DatabaseError:
try:
connection.rollback()
except Error:
# An error during rollback means that something
# went wrong with the connection. Drop it.
connection.close()
raise
else:
                # This flag will be set to True again if there isn't a savepoint
                # that allows the rollback to be performed at this level.
connection.needs_rollback = False
if connection.in_atomic_block:
# Roll back to savepoint if there is one, mark for rollback
# otherwise.
if sid is None:
connection.needs_rollback = True
else:
try:
connection.savepoint_rollback(sid)
except Error:
# If rolling back to a savepoint fails, mark for
# rollback at a higher level and avoid shadowing
# the original exception.
connection.needs_rollback = True
else:
# Roll back transaction
try:
connection.rollback()
except Error:
# An error during rollback means that something
# went wrong with the connection. Drop it.
connection.close()
finally:
# Outermost block exit when autocommit was enabled.
if not connection.in_atomic_block:
if connection.closed_in_transaction:
connection.connection = None
elif connection.features.autocommits_when_autocommit_is_off:
connection.autocommit = True
else:
connection.set_autocommit(True)
# Outermost block exit when autocommit was disabled.
elif not connection.savepoint_ids and not connection.commit_on_exit:
if connection.closed_in_transaction:
connection.connection = None
else:
connection.in_atomic_block = False
def __call__(self, func):
@wraps(func, assigned=available_attrs(func))
def inner(*args, **kwargs):
with self:
return func(*args, **kwargs)
return inner
def atomic(using=None, savepoint=True):
# Bare decorator: @atomic -- although the first argument is called
# `using`, it's actually the function being decorated.
if callable(using):
return Atomic(DEFAULT_DB_ALIAS, savepoint)(using)
# Decorator: @atomic(...) or context manager: with atomic(...): ...
else:
return Atomic(using, savepoint)
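def _example_atomic_usage(create_parent, create_child):
    # Editor's sketch, not part of Django: `atomic` used as a context manager,
    # with a nested block that joins the outer transaction instead of creating
    # a savepoint. `create_parent` and `create_child` are hypothetical
    # callables standing in for ORM writes.
    with atomic():
        create_parent()
        with atomic(savepoint=False):
            create_child()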
def _non_atomic_requests(view, using):
try:
view._non_atomic_requests.add(using)
except AttributeError:
view._non_atomic_requests = set([using])
return view
def non_atomic_requests(using=None):
if callable(using):
return _non_atomic_requests(using, DEFAULT_DB_ALIAS)
else:
if using is None:
using = DEFAULT_DB_ALIAS
return lambda view: _non_atomic_requests(view, using)
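# Editor's sketch: opting hypothetical views out of ATOMIC_REQUESTS, either
# for the default alias or for a specific one:
#
#     @non_atomic_requests
#     def stream_view(request): ...
#
#     @non_atomic_requests('reporting')
#     def report_view(request): ...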
############################################
# Deprecated decorators / context managers #
############################################
class Transaction(object):
"""
    Acts as either a decorator or a context manager. If it's a decorator, it
    takes a function and returns a wrapped function. If it's a context
    manager, it's used with the ``with`` statement. In either case,
    ``entering`` and ``exiting`` are called before and after the
    function/block executes, respectively.
autocommit, commit_on_success, and commit_manually contain the
implementations of entering and exiting.
"""
def __init__(self, entering, exiting, using):
self.entering = entering
self.exiting = exiting
self.using = using
def __enter__(self):
self.entering(self.using)
def __exit__(self, exc_type, exc_value, traceback):
self.exiting(exc_type, self.using)
def __call__(self, func):
@wraps(func)
def inner(*args, **kwargs):
with self:
return func(*args, **kwargs)
return inner
def _transaction_func(entering, exiting, using):
"""
    Takes three things: an entering function (what to do to start this block
    of transaction management), an exiting function (what to do to end it, on
    both success and failure), and `using`, which can be None (indicating
    DEFAULT_DB_ALIAS), a database alias, or a callable (indicating that
    `using` is DEFAULT_DB_ALIAS and that the function should be returned
    already wrapped).
    Returns either a Transaction object, which is both a decorator and a
    context manager, or a wrapped function, if `using` is a callable.
"""
# Note that although the first argument is *called* `using`, it
# may actually be a function; @autocommit and @autocommit('foo')
# are both allowed forms.
if using is None:
using = DEFAULT_DB_ALIAS
if callable(using):
return Transaction(entering, exiting, DEFAULT_DB_ALIAS)(using)
return Transaction(entering, exiting, using)
def autocommit(using=None):
"""
Decorator that activates commit on save. This is Django's default behavior;
this decorator is useful if you globally activated transaction management in
your settings file and want the default behavior in some view functions.
"""
warnings.warn("autocommit is deprecated in favor of set_autocommit.",
PendingDeprecationWarning, stacklevel=2)
def entering(using):
enter_transaction_management(managed=False, using=using)
def exiting(exc_type, using):
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_on_success(using=None):
"""
    This decorator activates commit on response. This way, if the view function
    runs successfully, a commit is made; if the view function raises an exception,
a rollback is made. This is one of the most common ways to do transaction
control in Web apps.
"""
warnings.warn("commit_on_success is deprecated in favor of atomic.",
PendingDeprecationWarning, stacklevel=2)
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_type, using):
try:
if exc_type is not None:
if is_dirty(using=using):
rollback(using=using)
else:
if is_dirty(using=using):
try:
commit(using=using)
except:
rollback(using=using)
raise
finally:
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_manually(using=None):
"""
Decorator that activates manual transaction control. It just disables
automatic transaction control and doesn't do any commit/rollback of its
own -- it's up to the user to call the commit and rollback functions
themselves.
"""
warnings.warn("commit_manually is deprecated in favor of set_autocommit.",
PendingDeprecationWarning, stacklevel=2)
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_type, using):
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_on_success_unless_managed(using=None, savepoint=False):
"""
Transitory API to preserve backwards-compatibility while refactoring.
Once the legacy transaction management is fully deprecated, this should
simply be replaced by atomic. Until then, it's necessary to guarantee that
a commit occurs on exit, which atomic doesn't do when it's nested.
Unlike atomic, savepoint defaults to False because that's closer to the
legacy behavior.
"""
connection = get_connection(using)
if connection.get_autocommit() or connection.in_atomic_block:
return atomic(using, savepoint)
else:
def entering(using):
pass
def exiting(exc_type, using):
set_dirty(using=using)
return _transaction_func(entering, exiting, using)
| mit | -1,195,857,419,622,866,700 | 37.079482 | 80 | 0.625649 | false |
oberlin/django | tests/migrations/test_base.py | 292 | 4620 | import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection
from django.db.migrations.recorder import MigrationRecorder
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
"""
Contains an extended set of asserts for testing migrations and schema operations.
"""
available_apps = ["migrations"]
def tearDown(self):
# Reset applied-migrations state.
recorder = MigrationRecorder(connection)
recorder.migration_qs.filter(app='migrations').delete()
def get_table_description(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_table_description(cursor, table)
def assertTableExists(self, table):
with connection.cursor() as cursor:
self.assertIn(table, connection.introspection.table_names(cursor))
def assertTableNotExists(self, table):
with connection.cursor() as cursor:
self.assertNotIn(table, connection.introspection.table_names(cursor))
def assertColumnExists(self, table, column):
self.assertIn(column, [c.name for c in self.get_table_description(table)])
def assertColumnNotExists(self, table, column):
self.assertNotIn(column, [c.name for c in self.get_table_description(table)])
def assertColumnNull(self, table, column):
self.assertEqual([c.null_ok for c in self.get_table_description(table) if c.name == column][0], True)
def assertColumnNotNull(self, table, column):
self.assertEqual([c.null_ok for c in self.get_table_description(table) if c.name == column][0], False)
def assertIndexExists(self, table, columns, value=True):
with connection.cursor() as cursor:
self.assertEqual(
value,
any(
c["index"]
for c in connection.introspection.get_constraints(cursor, table).values()
if c['columns'] == list(columns)
),
)
def assertIndexNotExists(self, table, columns):
return self.assertIndexExists(table, columns, False)
def assertFKExists(self, table, columns, to, value=True):
with connection.cursor() as cursor:
self.assertEqual(
value,
any(
c["foreign_key"] == to
for c in connection.introspection.get_constraints(cursor, table).values()
if c['columns'] == list(columns)
),
)
def assertFKNotExists(self, table, columns, to, value=True):
return self.assertFKExists(table, columns, to, False)
@contextmanager
def temporary_migration_module(self, app_label='migrations', module=None):
"""
Allows testing management commands in a temporary migrations module.
Wrap all invocations to makemigrations and squashmigrations with this
context manager in order to avoid creating migration files in your
source tree inadvertently.
Takes the application label that will be passed to makemigrations or
squashmigrations and the Python path to a migrations module.
The migrations module is used as a template for creating the temporary
migrations module. If it isn't provided, the application's migrations
module is used, if it exists.
Returns the filesystem path to the temporary migrations module.
"""
temp_dir = tempfile.mkdtemp()
try:
target_dir = tempfile.mkdtemp(dir=temp_dir)
with open(os.path.join(target_dir, '__init__.py'), 'w'):
pass
target_migrations_dir = os.path.join(target_dir, 'migrations')
if module is None:
module = apps.get_app_config(app_label).name + '.migrations'
try:
source_migrations_dir = module_dir(import_module(module))
except (ImportError, ValueError):
pass
else:
shutil.copytree(source_migrations_dir, target_migrations_dir)
with extend_sys_path(temp_dir):
new_module = os.path.basename(target_dir) + '.migrations'
with self.settings(MIGRATION_MODULES={app_label: new_module}):
yield target_migrations_dir
finally:
shutil.rmtree(temp_dir)
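# Editor's sketch, not part of Django's suite: typical use of
# temporary_migration_module around a makemigrations call. The 'migrations'
# app label matches available_apps above.
class _ExampleTemporaryModuleTests(MigrationTestBase):
    def test_makemigrations_writes_to_tempdir(self):
        from django.core.management import call_command  # sketch-local import
        with self.temporary_migration_module() as migration_dir:
            call_command('makemigrations', 'migrations', verbosity=0)
            self.assertTrue(os.path.exists(migration_dir))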
| bsd-3-clause | 1,587,216,098,701,959,400 | 37.5 | 110 | 0.641126 | false |
zerc/django | django/contrib/gis/gdal/raster/const.py | 238 | 1537 | """
GDAL - Constant definitions
"""
from ctypes import (
c_byte, c_double, c_float, c_int16, c_int32, c_uint16, c_uint32,
)
# See http://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4
GDAL_PIXEL_TYPES = {
0: 'GDT_Unknown', # Unknown or unspecified type
1: 'GDT_Byte', # Eight bit unsigned integer
2: 'GDT_UInt16', # Sixteen bit unsigned integer
3: 'GDT_Int16', # Sixteen bit signed integer
4: 'GDT_UInt32', # Thirty-two bit unsigned integer
5: 'GDT_Int32', # Thirty-two bit signed integer
6: 'GDT_Float32', # Thirty-two bit floating point
7: 'GDT_Float64', # Sixty-four bit floating point
8: 'GDT_CInt16', # Complex Int16
9: 'GDT_CInt32', # Complex Int32
10: 'GDT_CFloat32', # Complex Float32
11: 'GDT_CFloat64', # Complex Float64
}
# A list of gdal datatypes that are integers.
GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5]
# Lookup values to convert GDAL pixel type indices into ctypes objects.
# The GDAL band-io works with ctypes arrays to hold data to be written
# or to hold the space for data to be read into. The lookup below helps
# selecting the right ctypes object for a given gdal pixel type.
GDAL_TO_CTYPES = [
None, c_byte, c_uint16, c_int16, c_uint32, c_int32,
c_float, c_double, None, None, None, None
]
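# Editor's sketch: resolving a ctypes buffer for a GDAL pixel type code. For
# example, GDT_Float32 (code 6) maps to c_float, so a band of n pixels can be
# read into (c_float * n)(). Complex types map to None and are unsupported.
def _example_buffer_for_pixel_type(pixel_type_code, n):
    ctype = GDAL_TO_CTYPES[pixel_type_code]
    if ctype is None:
        raise ValueError('No ctypes mapping for pixel type %d' % pixel_type_code)
    return (ctype * n)()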
# List of resampling algorithms that can be used to warp a GDALRaster.
GDAL_RESAMPLE_ALGORITHMS = {
'NearestNeighbour': 0,
'Bilinear': 1,
'Cubic': 2,
'CubicSpline': 3,
'Lanczos': 4,
'Average': 5,
'Mode': 6,
}
| bsd-3-clause | -349,536,835,688,983,040 | 33.155556 | 72 | 0.666233 | false |
tashigaofei/BlogSpider | scrapy/tests/test_downloadermiddleware_redirect.py | 15 | 9245 | import unittest
from scrapy.contrib.downloadermiddleware.redirect import RedirectMiddleware, MetaRefreshMiddleware
from scrapy.spider import Spider
from scrapy.exceptions import IgnoreRequest
from scrapy.http import Request, Response, HtmlResponse
from scrapy.utils.test import get_crawler
class RedirectMiddlewareTest(unittest.TestCase):
def setUp(self):
crawler = get_crawler()
self.spider = Spider('foo')
self.mw = RedirectMiddleware.from_crawler(crawler)
def test_priority_adjust(self):
req = Request('http://a.com')
rsp = Response('http://a.com', headers={'Location': 'http://a.com/redirected'}, status=301)
req2 = self.mw.process_response(req, rsp, self.spider)
assert req2.priority > req.priority
def test_redirect_301(self):
def _test(method):
url = 'http://www.example.com/301'
url2 = 'http://www.example.com/redirected'
req = Request(url, method=method)
rsp = Response(url, headers={'Location': url2}, status=301)
req2 = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req2, Request)
self.assertEqual(req2.url, url2)
self.assertEqual(req2.method, method)
            # a response with a 3XX status code but without a Location header should be ignored
del rsp.headers['Location']
assert self.mw.process_response(req, rsp, self.spider) is rsp
_test('GET')
_test('POST')
_test('HEAD')
def test_dont_redirect(self):
url = 'http://www.example.com/301'
url2 = 'http://www.example.com/redirected'
req = Request(url, meta={'dont_redirect': True})
rsp = Response(url, headers={'Location': url2}, status=301)
r = self.mw.process_response(req, rsp, self.spider)
assert isinstance(r, Response)
assert r is rsp
def test_redirect_302(self):
url = 'http://www.example.com/302'
url2 = 'http://www.example.com/redirected2'
req = Request(url, method='POST', body='test',
headers={'Content-Type': 'text/plain', 'Content-length': '4'})
rsp = Response(url, headers={'Location': url2}, status=302)
req2 = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req2, Request)
self.assertEqual(req2.url, url2)
self.assertEqual(req2.method, 'GET')
assert 'Content-Type' not in req2.headers, \
"Content-Type header must not be present in redirected request"
assert 'Content-Length' not in req2.headers, \
"Content-Length header must not be present in redirected request"
assert not req2.body, \
"Redirected body must be empty, not '%s'" % req2.body
        # a response with a 3XX status code but without a Location header should be ignored
del rsp.headers['Location']
assert self.mw.process_response(req, rsp, self.spider) is rsp
def test_redirect_302_head(self):
url = 'http://www.example.com/302'
url2 = 'http://www.example.com/redirected2'
req = Request(url, method='HEAD')
rsp = Response(url, headers={'Location': url2}, status=302)
req2 = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req2, Request)
self.assertEqual(req2.url, url2)
self.assertEqual(req2.method, 'HEAD')
        # a response with a 3XX status code but without a Location header should be ignored
del rsp.headers['Location']
assert self.mw.process_response(req, rsp, self.spider) is rsp
def test_max_redirect_times(self):
self.mw.max_redirect_times = 1
req = Request('http://scrapytest.org/302')
rsp = Response('http://scrapytest.org/302', headers={'Location': '/redirected'}, status=302)
req = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req, Request)
assert 'redirect_times' in req.meta
self.assertEqual(req.meta['redirect_times'], 1)
self.assertRaises(IgnoreRequest, self.mw.process_response, req, rsp, self.spider)
def test_ttl(self):
self.mw.max_redirect_times = 100
req = Request('http://scrapytest.org/302', meta={'redirect_ttl': 1})
rsp = Response('http://www.scrapytest.org/302', headers={'Location': '/redirected'}, status=302)
req = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req, Request)
self.assertRaises(IgnoreRequest, self.mw.process_response, req, rsp, self.spider)
def test_redirect_urls(self):
req1 = Request('http://scrapytest.org/first')
rsp1 = Response('http://scrapytest.org/first', headers={'Location': '/redirected'}, status=302)
req2 = self.mw.process_response(req1, rsp1, self.spider)
rsp2 = Response('http://scrapytest.org/redirected', headers={'Location': '/redirected2'}, status=302)
req3 = self.mw.process_response(req2, rsp2, self.spider)
self.assertEqual(req2.url, 'http://scrapytest.org/redirected')
self.assertEqual(req2.meta['redirect_urls'], ['http://scrapytest.org/first'])
self.assertEqual(req3.url, 'http://scrapytest.org/redirected2')
self.assertEqual(req3.meta['redirect_urls'], ['http://scrapytest.org/first', 'http://scrapytest.org/redirected'])
class MetaRefreshMiddlewareTest(unittest.TestCase):
def setUp(self):
crawler = get_crawler()
self.spider = Spider('foo')
self.mw = MetaRefreshMiddleware.from_crawler(crawler)
def _body(self, interval=5, url='http://example.org/newpage'):
return """<html><head><meta http-equiv="refresh" content="{0};url={1}"/></head></html>"""\
.format(interval, url)
def test_priority_adjust(self):
req = Request('http://a.com')
rsp = HtmlResponse(req.url, body=self._body())
req2 = self.mw.process_response(req, rsp, self.spider)
assert req2.priority > req.priority
def test_meta_refresh(self):
req = Request(url='http://example.org')
rsp = HtmlResponse(req.url, body=self._body())
req2 = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req2, Request)
self.assertEqual(req2.url, 'http://example.org/newpage')
def test_meta_refresh_with_high_interval(self):
        # meta-refresh with a high interval doesn't trigger a redirect
req = Request(url='http://example.org')
rsp = HtmlResponse(url='http://example.org', body=self._body(interval=1000))
rsp2 = self.mw.process_response(req, rsp, self.spider)
assert rsp is rsp2
    def test_meta_refresh_through_posted_request(self):
req = Request(url='http://example.org', method='POST', body='test',
headers={'Content-Type': 'text/plain', 'Content-length': '4'})
rsp = HtmlResponse(req.url, body=self._body())
req2 = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req2, Request)
self.assertEqual(req2.url, 'http://example.org/newpage')
self.assertEqual(req2.method, 'GET')
assert 'Content-Type' not in req2.headers, \
"Content-Type header must not be present in redirected request"
assert 'Content-Length' not in req2.headers, \
"Content-Length header must not be present in redirected request"
assert not req2.body, \
"Redirected body must be empty, not '%s'" % req2.body
def test_max_redirect_times(self):
self.mw.max_redirect_times = 1
req = Request('http://scrapytest.org/max')
rsp = HtmlResponse(req.url, body=self._body())
req = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req, Request)
assert 'redirect_times' in req.meta
self.assertEqual(req.meta['redirect_times'], 1)
self.assertRaises(IgnoreRequest, self.mw.process_response, req, rsp, self.spider)
def test_ttl(self):
self.mw.max_redirect_times = 100
req = Request('http://scrapytest.org/302', meta={'redirect_ttl': 1})
rsp = HtmlResponse(req.url, body=self._body())
req = self.mw.process_response(req, rsp, self.spider)
assert isinstance(req, Request)
self.assertRaises(IgnoreRequest, self.mw.process_response, req, rsp, self.spider)
def test_redirect_urls(self):
req1 = Request('http://scrapytest.org/first')
rsp1 = HtmlResponse(req1.url, body=self._body(url='/redirected'))
req2 = self.mw.process_response(req1, rsp1, self.spider)
assert isinstance(req2, Request), req2
rsp2 = HtmlResponse(req2.url, body=self._body(url='/redirected2'))
req3 = self.mw.process_response(req2, rsp2, self.spider)
assert isinstance(req3, Request), req3
self.assertEqual(req2.url, 'http://scrapytest.org/redirected')
self.assertEqual(req2.meta['redirect_urls'], ['http://scrapytest.org/first'])
self.assertEqual(req3.url, 'http://scrapytest.org/redirected2')
self.assertEqual(req3.meta['redirect_urls'], ['http://scrapytest.org/first', 'http://scrapytest.org/redirected'])
if __name__ == "__main__":
unittest.main()
| mit | 185,456,331,411,753,120 | 44.318627 | 121 | 0.640671 | false |
writefaruq/lionface-app | django/contrib/gis/tests/test_spatialrefsys.py | 12 | 6799 | from django.db import connection
from django.contrib.gis.tests.utils import mysql, no_mysql, oracle, postgis, spatialite
from django.utils import unittest
test_srs = ({'srid' : 4326,
'auth_name' : ('EPSG', True),
'auth_srid' : 4326,
'srtext' : 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
'srtext14' : 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
'proj4' : '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ',
'spheroid' : 'WGS 84', 'name' : 'WGS 84',
'geographic' : True, 'projected' : False, 'spatialite' : True,
'ellipsoid' : (6378137.0, 6356752.3, 298.257223563), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 1, 9),
},
{'srid' : 32140,
'auth_name' : ('EPSG', False),
'auth_srid' : 32140,
'srtext' : 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
'srtext14': 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],AUTHORITY["EPSG","32140"],AXIS["X",EAST],AXIS["Y",NORTH]]',
'proj4' : '+proj=lcc +lat_1=30.28333333333333 +lat_2=28.38333333333333 +lat_0=27.83333333333333 +lon_0=-99 +x_0=600000 +y_0=4000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs ',
'spheroid' : 'GRS 1980', 'name' : 'NAD83 / Texas South Central',
'geographic' : False, 'projected' : True, 'spatialite' : False,
'ellipsoid' : (6378137.0, 6356752.31414, 298.257222101), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 5, 10),
},
)
if oracle:
from django.contrib.gis.db.backends.oracle.models import SpatialRefSys
elif postgis:
from django.contrib.gis.db.backends.postgis.models import SpatialRefSys
elif spatialite:
from django.contrib.gis.db.backends.spatialite.models import SpatialRefSys
class SpatialRefSysTest(unittest.TestCase):
@no_mysql
def test01_retrieve(self):
"Testing retrieval of SpatialRefSys model objects."
for sd in test_srs:
srs = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(sd['srid'], srs.srid)
# Some of the authority names are borked on Oracle, e.g., SRID=32140.
            # Also, Oracle Spatial seems to add extraneous info to fields, hence
            # the testing with the 'startswith' flag.
auth_name, oracle_flag = sd['auth_name']
if postgis or (oracle and oracle_flag):
self.assertEqual(True, srs.auth_name.startswith(auth_name))
self.assertEqual(sd['auth_srid'], srs.auth_srid)
# No proj.4 and different srtext on oracle backends :(
if postgis:
if connection.ops.spatial_version >= (1, 4, 0):
srtext = sd['srtext14']
else:
srtext = sd['srtext']
self.assertEqual(srtext, srs.wkt)
self.assertEqual(sd['proj4'], srs.proj4text)
@no_mysql
def test02_osr(self):
"Testing getting OSR objects from SpatialRefSys model objects."
for sd in test_srs:
sr = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(True, sr.spheroid.startswith(sd['spheroid']))
self.assertEqual(sd['geographic'], sr.geographic)
self.assertEqual(sd['projected'], sr.projected)
if not (spatialite and not sd['spatialite']):
# Can't get 'NAD83 / Texas South Central' from PROJ.4 string
# on SpatiaLite
self.assertEqual(True, sr.name.startswith(sd['name']))
# Testing the SpatialReference object directly.
if postgis or spatialite:
srs = sr.srs
self.assertEqual(sd['proj4'], srs.proj4)
# No `srtext` field in the `spatial_ref_sys` table in SpatiaLite
if not spatialite:
if connection.ops.spatial_version >= (1, 4, 0):
srtext = sd['srtext14']
else:
srtext = sd['srtext']
self.assertEqual(srtext, srs.wkt)
@no_mysql
def test03_ellipsoid(self):
"Testing the ellipsoid property."
for sd in test_srs:
# Getting the ellipsoid and precision parameters.
ellps1 = sd['ellipsoid']
prec = sd['eprec']
# Getting our spatial reference and its ellipsoid
srs = SpatialRefSys.objects.get(srid=sd['srid'])
ellps2 = srs.ellipsoid
for i in range(3):
param1 = ellps1[i]
param2 = ellps2[i]
self.assertAlmostEqual(ellps1[i], ellps2[i], prec[i])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefSysTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
| bsd-3-clause | -7,731,432,972,394,027,000 | 58.168142 | 737 | 0.605236 | false |
valorekhov/AvrBee | src/AvrBee/HexFileFormat.py | 1 | 2846 | from abc import ABCMeta, abstractmethod
import struct
import binascii, os, sys
class HexFileFormat(object):
"""Parses out Hex File format into a byte stream"""
def __init__(self, path=None, file=None):
self.path = path
self.file = file
pass
def get_bytes(self):
with open(self.path) as f:
self.lines = f.readlines()
ret = list()
for line in self.lines:
if not line.startswith(':'):
raise Exception("Unsupported format")
line = line[1:].replace('\n','')
bytes = bytearray.fromhex(line)
pageSize = bytes[0]
memAddrHigh = bytes[1] #Big endian address as per http://en.wikipedia.org/wiki/Intel_HEX
memAddrLow = bytes[2]
memAddr = (memAddrHigh << 8) + memAddrLow
eolFlag = bytes[3]
if eolFlag == 1:
break
checksum = bytes[-1]
payload = bytes[4:4+pageSize]
payloadsum = (pageSize + memAddrLow + memAddrHigh + eolFlag + sum(x for x in payload) + checksum ) & 0xFF
if payloadsum != 0:
raise Exception("Checksum mismatch")
ret.append((memAddr,payload))
return ret
def save_bytes(self, data, startAddress = 0):
        f = open(self.path, 'w') if self.file is None else self.file
pageSize = 16
length = len(data)
address = startAddress
        # Ceiling division: a trailing partial page still needs its own record.
        # (The original truncated with int(length / 16), so the partial-page
        # check could never fire and trailing bytes were silently dropped.)
        pages = (length + pageSize - 1) // pageSize
for i in range(0, pages):
slice = data[i*pageSize: i*pageSize + pageSize]
length = len(slice)
eol = 0 if i+1 < pages else 1
addrLow = address & 0xFF
addrHigh = (address >> 8) & 0xFF
checksum = (length + addrLow + addrHigh + eol + sum(x for x in slice) )& 0xff
            checksum = (0x100 - checksum) & 0xff  # two's-complement Intel HEX checksum
bytes = bytearray()
bytes.append(length)
bytes.extend(struct.pack(">H", address))
bytes.append(eol)
bytes.extend(slice)
bytes.append(checksum)
#struct.pack( "BiB" + str(length)+"cB" , length, address & 0xFFFF, eol, str(slice), checksum)
f.write(':')
            f.write(binascii.b2a_hex(bytes).decode('ascii').upper())
f.write(os.linesep)
address += length
if f != sys.stdout:
f.close()
class FileFormat(metaclass=ABCMeta):
"""Abstract class representing parsers"""
@abstractmethod
def get_bytes(self):
pass
@abstractmethod
def save_bytes(self, data):
pass
FileFormat.register(HexFileFormat)
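if __name__ == '__main__':
    # Editor's sketch: emit 32 bytes as two Intel HEX records on stdout via
    # the `file=` hook, so no file is created on disk.
    HexFileFormat(file=sys.stdout).save_bytes(bytearray(range(32)))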
| unlicense | -8,157,248,079,459,672,000 | 32.093023 | 117 | 0.515109 | false |
NaturalSolutions/NsPortal | Back/ns_portal/resources/root/security/oauth2/v1/login/login_resource.py | 1 | 2467 | from ns_portal.core.resources import (
MetaEndPointResource
)
from marshmallow import (
Schema,
fields,
EXCLUDE,
ValidationError
)
from ns_portal.database.main_db import (
TUsers
)
from sqlalchemy import (
and_
)
from sqlalchemy.orm.exc import (
MultipleResultsFound
)
from pyramid.security import (
Allow,
Everyone,
remember
)
from ns_portal.utils import (
getCookieToken
)
from pyramid.response import (
Response
)
class loginSchema(Schema):
username = fields.String(required=True)
password = fields.String(required=True)
class Meta:
unknown = EXCLUDE
class LoginResource(MetaEndPointResource):
__acl__ = [
(Allow, Everyone, 'create')
]
def validateUserCredentials(self, data):
query = self.request.dbsession.query(TUsers)
query = query.filter(
and_(
TUsers.TUse_Login == data.get('username'),
TUsers.TUse_Password == data.get('password')
)
)
try:
res = query.one_or_none()
except MultipleResultsFound:
raise ValidationError({
"error": (
'your username and password are'
' not unique in db'
' please report it to an admin'
)
})
if res:
# this key is added after validation
return res
else:
raise ValidationError({
"error": (
'your username and/or password'
' are wrongs'
)
})
def POST(self):
reqParams = self.__parser__(
args=loginSchema(),
location='form'
)
userFound = self.validateUserCredentials(data=reqParams)
if userFound:
token = getCookieToken(
idUser=getattr(userFound, 'TUse_PK_ID'),
request=self.request
)
resp = Response(
status=200
)
remember(
resp,
token
)
self.request.response = resp
return self.request.response
else:
raise ValidationError({
"error": (
'your username and/or password'
' are wrongs'
)
})
| mit | -6,602,315,053,987,175,000 | 22.721154 | 64 | 0.495338 | false |
AustereCuriosity/astropy | astropy/wcs/tests/extension/test_extension.py | 1 | 2869 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
import pytest
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
astropy_path = os.path.abspath(
os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
paths = [str(tmpdir), astropy_path]
if env.get('PYTHONPATH'):
paths.append(env.get('PYTHONPATH'))
env[str('PYTHONPATH')] = str(os.pathsep.join(paths))
# Build the extension
# This used to use subprocess.check_call, but on Python 3.4 there was
# a mysterious Heisenbug causing this to fail with a non-zero exit code
# *unless* the output is redirected. This bug also did not occur in an
# interactive session, so it likely had something to do with pytest's
# output capture
p = subprocess.Popen([sys.executable, 'setup.py', 'install',
'--install-lib={0}'.format(tmpdir),
astropy_path], cwd=setup_path, env=env,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Whether the process fails or not this isn't likely to produce a great
# deal of output so communicate should be fine in almost all cases
stdout, stderr = p.communicate()
try:
stdout, stderr = stdout.decode('utf8'), stderr.decode('utf8')
except UnicodeDecodeError:
# Don't try to guess about encoding; just display the text
stdout, stderr = stdout.decode('latin1'), stderr.decode('latin1')
# If compilation fails, we can skip this test, since the
# dependencies necessary to compile an extension may be missing.
# If it passes, however, we want to continue and ensure that the
# extension created is actually usable. However, if we're on
# Travis-CI, or another generic continuous integration setup, we
# don't want to ever skip, because having it fail in that
# environment probably indicates something more serious that we
# want to know about.
if (not (str('CI') in os.environ or
str('TRAVIS') in os.environ or
str('CONTINUOUS_INTEGRATION') in os.environ) and
p.returncode):
pytest.skip("system unable to compile extensions")
return
assert p.returncode == 0, (
"setup.py exited with non-zero return code {0}\n"
"stdout:\n\n{1}\n\nstderr:\n\n{2}\n".format(
p.returncode, stdout, stderr))
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call([sys.executable, '-c', code], env=env)
| bsd-3-clause | 1,447,445,754,207,952,400 | 37.253333 | 82 | 0.646567 | false |
mlcommons/training | object_detection/pytorch/maskrcnn_benchmark/config/paths_catalog.py | 1 | 8463 | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""Centralized catalog of paths."""
import os
class DatasetCatalog(object):
DATA_DIR = "datasets"
DATASETS = {
"coco_2017_train": {
"img_dir": "coco/train2017",
"ann_file": "coco/annotations/instances_train2017.json"
},
"coco_2017_val": {
"img_dir": "coco/val2017",
"ann_file": "coco/annotations/instances_val2017.json"
},
"coco_2014_train": {
"img_dir": "coco/train2014",
"ann_file": "coco/annotations/instances_train2014.json"
},
"coco_2014_val": {
"img_dir": "coco/val2014",
"ann_file": "coco/annotations/instances_val2014.json"
},
"coco_2014_minival": {
"img_dir": "coco/val2014",
"ann_file": "coco/annotations/instances_minival2014.json"
},
"coco_2014_valminusminival": {
"img_dir": "coco/val2014",
"ann_file": "coco/annotations/instances_valminusminival2014.json"
},
"keypoints_coco_2014_train": {
"img_dir": "coco/train2014",
"ann_file": "annotations/person_keypoints_train2014.json",
},
"keypoints_coco_2014_val": {
"img_dir": "coco/val2014",
"ann_file": "coco/annotations/instances_val2014.json"
},
"keypoints_coco_2014_minival": {
"img_dir": "coco/val2014",
"ann_file": "annotations/person_keypoints_minival2014.json",
},
"keypoints_coco_2014_valminusminival": {
"img_dir": "coco/val2014",
"ann_file": "annotations/person_keypoints_valminusminival2014.json",
},
"voc_2007_train": {
"data_dir": "voc/VOC2007",
"split": "train"
},
"voc_2007_train_cocostyle": {
"img_dir": "voc/VOC2007/JPEGImages",
"ann_file": "voc/VOC2007/Annotations/pascal_train2007.json"
},
"voc_2007_val": {
"data_dir": "voc/VOC2007",
"split": "val"
},
"voc_2007_val_cocostyle": {
"img_dir": "voc/VOC2007/JPEGImages",
"ann_file": "voc/VOC2007/Annotations/pascal_val2007.json"
},
"voc_2007_test": {
"data_dir": "voc/VOC2007",
"split": "test"
},
"voc_2007_test_cocostyle": {
"img_dir": "voc/VOC2007/JPEGImages",
"ann_file": "voc/VOC2007/Annotations/pascal_test2007.json"
},
"voc_2012_train": {
"data_dir": "voc/VOC2012",
"split": "train"
},
"voc_2012_train_cocostyle": {
"img_dir": "voc/VOC2012/JPEGImages",
"ann_file": "voc/VOC2012/Annotations/pascal_train2012.json"
},
"voc_2012_val": {
"data_dir": "voc/VOC2012",
"split": "val"
},
"voc_2012_val_cocostyle": {
"img_dir": "voc/VOC2012/JPEGImages",
"ann_file": "voc/VOC2012/Annotations/pascal_val2012.json"
},
"voc_2012_test": {
"data_dir": "voc/VOC2012",
"split": "test"
# PASCAL VOC2012 doesn't made the test annotations available, so there's no json annotation
},
"cityscapes_fine_instanceonly_seg_train_cocostyle": {
"img_dir": "cityscapes/images",
"ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_train.json"
},
"cityscapes_fine_instanceonly_seg_val_cocostyle": {
"img_dir": "cityscapes/images",
"ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_val.json"
},
"cityscapes_fine_instanceonly_seg_test_cocostyle": {
"img_dir": "cityscapes/images",
"ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_test.json"
}
}
@staticmethod
def get(name):
if "coco" in name:
data_dir = DatasetCatalog.DATA_DIR
attrs = DatasetCatalog.DATASETS[name]
args = dict(
root=os.path.join(data_dir, attrs["img_dir"]),
ann_file=os.path.join(data_dir, attrs["ann_file"]),
)
return dict(
factory="COCODataset",
args=args,
)
elif "voc" in name:
data_dir = DatasetCatalog.DATA_DIR
attrs = DatasetCatalog.DATASETS[name]
args = dict(
data_dir=os.path.join(data_dir, attrs["data_dir"]),
split=attrs["split"],
)
return dict(
factory="PascalVOCDataset",
args=args,
)
raise RuntimeError("Dataset not available: {}".format(name))
class ModelCatalog(object):
S3_C2_DETECTRON_URL = "https://dl.fbaipublicfiles.com/detectron"
C2_IMAGENET_MODELS = {
"MSRA/R-50": "ImageNetPretrained/MSRA/R-50.pkl",
"MSRA/R-50-GN": "ImageNetPretrained/47261647/R-50-GN.pkl",
"MSRA/R-101": "ImageNetPretrained/MSRA/R-101.pkl",
"MSRA/R-101-GN": "ImageNetPretrained/47592356/R-101-GN.pkl",
"FAIR/20171220/X-101-32x8d": "ImageNetPretrained/20171220/X-101-32x8d.pkl",
}
C2_DETECTRON_SUFFIX = "output/train/{}coco_2014_train%3A{}coco_2014_valminusminival/generalized_rcnn/model_final.pkl"
C2_DETECTRON_MODELS = {
"35857197/e2e_faster_rcnn_R-50-C4_1x": "01_33_49.iAX0mXvW",
"35857345/e2e_faster_rcnn_R-50-FPN_1x": "01_36_30.cUF7QR7I",
"35857890/e2e_faster_rcnn_R-101-FPN_1x": "01_38_50.sNxI7sX7",
"36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x": "06_31_39.5MIHi1fZ",
"35858791/e2e_mask_rcnn_R-50-C4_1x": "01_45_57.ZgkA7hPB",
"35858933/e2e_mask_rcnn_R-50-FPN_1x": "01_48_14.DzEQe4wC",
"35861795/e2e_mask_rcnn_R-101-FPN_1x": "02_31_37.KqyEK4tT",
"36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x": "06_35_59.RZotkLKI",
"37129812/e2e_mask_rcnn_X-152-32x8d-FPN-IN5k_1.44x": "09_35_36.8pzTQKYK",
# keypoints
"37697547/e2e_keypoint_rcnn_R-50-FPN_1x": "08_42_54.kdzV35ao"
}
@staticmethod
def get(name):
if name.startswith("Caffe2Detectron/COCO"):
return ModelCatalog.get_c2_detectron_12_2017_baselines(name)
if name.startswith("ImageNetPretrained"):
return ModelCatalog.get_c2_imagenet_pretrained(name)
raise RuntimeError("model not present in the catalog {}".format(name))
@staticmethod
def get_c2_imagenet_pretrained(name):
prefix = ModelCatalog.S3_C2_DETECTRON_URL
name = name[len("ImageNetPretrained/"):]
name = ModelCatalog.C2_IMAGENET_MODELS[name]
url = "/".join([prefix, name])
return url
@staticmethod
def get_c2_detectron_12_2017_baselines(name):
# Detectron C2 models are stored following the structure
# prefix/<model_id>/2012_2017_baselines/<model_name>.yaml.<signature>/suffix
# we use as identifiers in the catalog Caffe2Detectron/COCO/<model_id>/<model_name>
prefix = ModelCatalog.S3_C2_DETECTRON_URL
dataset_tag = "keypoints_" if "keypoint" in name else ""
suffix = ModelCatalog.C2_DETECTRON_SUFFIX.format(dataset_tag, dataset_tag)
# remove identification prefix
name = name[len("Caffe2Detectron/COCO/"):]
# split in <model_id> and <model_name>
model_id, model_name = name.split("/")
# parsing to make it match the url address from the Caffe2 models
model_name = "{}.yaml".format(model_name)
signature = ModelCatalog.C2_DETECTRON_MODELS[name]
unique_name = ".".join([model_name, signature])
url = "/".join([prefix, model_id, "12_2017_baselines", unique_name, suffix])
return url
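# Editor's sketch: ModelCatalog.get("ImageNetPretrained/MSRA/R-50") resolves to
# "https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/MSRA/R-50.pkl".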
| apache-2.0 | -6,520,493,047,081,515,000 | 39.884058 | 121 | 0.583481 | false |
schreiberx/sweet | benchmarks_plane/nonlinear_interaction/pp_plot_errors_single.py | 2 | 2935 | #! /usr/bin/env python3
import sys
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from matplotlib.lines import Line2D
from mule.postprocessing.JobsData import *
from mule.postprocessing.JobsDataConsolidate import *
if len(sys.argv) > 1:
muletag = sys.argv[1]
output_filename = sys.argv[2]
else:
print("")
print("Usage:")
print("")
print(" "+sys.argv[0]+" [jobdata mule tag for y axis] [output_filename.pdf] [jobdir1] [jobdir2] ... [jobdirN]")
print("")
sys.exit(1)
if len(sys.argv) > 3:
# Load Jobs specified via program parameters
jd = JobsData(job_dirs=sys.argv[3:])
else:
# Load all Jobs
jd = JobsData()
# Consolidate data...
jdc = JobsDataConsolidate(jd)
# ... which belongs to the same time integration method
jdc_groups = jdc.create_groups(['runtime.timestepping_method'])
#
# Filter to exclude data points that indicate instabilities
# (returning True drops the sample)
#
def data_filter(x, y, jd):
if y == None:
return True
if 'runtime.max_simulation_time' in jd:
if jd['runtime.max_simulation_time'] <= 24*60*60:
if y > 100:
return True
elif jd['runtime.max_simulation_time'] <= 10*24*60*60:
if y > 1000:
return True
return False
# Exctract data suitable for plotting
jdc_groups_data = JobsData_GroupsPlottingScattered(
jdc_groups,
'runtime.timestep_size',
muletag,
data_filter=data_filter
)
data = jdc_groups_data.get_data()
def label(d):
val = d['runtime.timestepping_method'].replace('_', '\\_')+', $\Delta t = '+str(d['runtime.timestep_size'])+'$'
return val
##########################################################
# Plotting starts here
##########################################################
print("*"*80)
print("*"*80)
print("*"*80)
fontsize=18
figsize=(10, 10)
fig, ax = plt.subplots(figsize=(10,10))
#plt.rc('text', usetex=True)
ax.set_xscale("log", nonposx='clip')
ax.set_yscale("log", nonposy='clip')
colors = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
markers = []
for m in Line2D.markers:
try:
if len(m) == 1 and m != ' ' and m != '':
markers.append(m)
except TypeError:
pass
linestyles = ['-', '--', ':', '-.']
c = 0
title = ''
for key, d in data.items():
x = d['x_values']
y = d['y_values']
l = key.replace('_', '\\_')
print(" + "+l)
print(x)
print(y)
ax.plot(x, y, marker=markers[c % len(markers)], linestyle=linestyles[c % len(linestyles)], label=l)
c = c + 1
if title != '':
plt.title(title, fontsize=fontsize)
plt.xlabel("Timestep size $\Delta t$ (sec)", fontsize=fontsize)
#
# Name of data
#
dataname = "TODO"
if 'prog_h' in muletag:
dataname = "surface height $h$"
#
# Norm
#
if 'linf' in muletag:
norm = "$L_\infty$"
else:
norm = "$L_{TODO}$"
plt.ylabel(norm+" error on "+dataname, fontsize=fontsize)
plt.legend(fontsize=15)
plt.savefig(output_filename, transparent=True, bbox_inches='tight', pad_inches=0)
plt.close()
| mit | 1,448,827,930,040,319,200 | 17.814103 | 112 | 0.61908 | false |
SatoshiNXSimudrone/sl4a-damon-clone | python/src/Tools/pybench/With.py | 43 | 4137 | from __future__ import with_statement
from pybench import Test
class WithFinally(Test):
version = 2.0
operations = 20
rounds = 80000
class ContextManager(object):
def __enter__(self):
pass
def __exit__(self, exc, val, tb):
pass
def test(self):
cm = self.ContextManager()
for i in xrange(self.rounds):
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
with cm: pass
def calibrate(self):
cm = self.ContextManager()
for i in xrange(self.rounds):
pass
class TryFinally(Test):
version = 2.0
operations = 20
rounds = 80000
class ContextManager(object):
def __enter__(self):
pass
def __exit__(self):
# "Context manager" objects used just for their cleanup
# actions in finally blocks usually don't have parameters.
pass
def test(self):
cm = self.ContextManager()
for i in xrange(self.rounds):
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
cm.__enter__()
try: pass
finally: cm.__exit__()
def calibrate(self):
cm = self.ContextManager()
for i in xrange(self.rounds):
pass
class WithRaiseExcept(Test):
version = 2.0
operations = 2 + 3 + 3
rounds = 100000
class BlockExceptions(object):
def __enter__(self):
pass
def __exit__(self, exc, val, tb):
return True
def test(self):
error = ValueError
be = self.BlockExceptions()
for i in xrange(self.rounds):
with be: raise error
with be: raise error
with be: raise error,"something"
with be: raise error,"something"
with be: raise error,"something"
with be: raise error("something")
with be: raise error("something")
with be: raise error("something")
def calibrate(self):
error = ValueError
be = self.BlockExceptions()
for i in xrange(self.rounds):
pass
| apache-2.0 | 2,657,609,480,484,924,000 | 20.773684 | 70 | 0.433164 | false |
francisco-dlp/hyperspy | hyperspy/samfire_utils/weights/red_chisq.py | 6 | 1212 | # -*- coding: utf-8 -*-
# Copyright 2007-2011 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
class ReducedChiSquaredWeight(object):
def __init__(self):
self.expected = 1.0
self.model = None
def function(self, ind):
return np.abs(self.model.red_chisq.data[ind] - self.expected)
def map(self, mask, slices=slice(None, None)):
thing = self.model.red_chisq.data[slices].copy()
thing = thing.astype('float64')
thing[np.logical_not(mask)] = np.nan
return np.abs(thing - self.expected)
| gpl-3.0 | 8,327,508,954,012,630,000 | 33.628571 | 70 | 0.698845 | false |
johnnykv/heralding | heralding/tests/test_pop3.py | 1 | 2904 | # Copyright (C) 2017 Johnny Vestergaard <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import asyncio
import unittest
from heralding.capabilities.pop3 import Pop3
from heralding.misc.common import cancel_all_pending_tasks
from heralding.reporting.reporting_relay import ReportingRelay
class Pop3Tests(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
self.reporting_relay = ReportingRelay()
self.reporting_relay_task = self.loop.run_in_executor(
None, self.reporting_relay.start)
def tearDown(self):
self.reporting_relay.stop()
# We give reporting_relay a chance to be finished
self.loop.run_until_complete(self.reporting_relay_task)
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
self.loop.run_until_complete(cancel_all_pending_tasks(self.loop))
self.loop.close()
def test_login(self):
"""Testing different login combinations"""
async def pop3_login():
login_sequences = [
# invalid login, invalid password
(('USER wakkwakk', b'+OK User accepted'),
('PASS wakkwakk', b'-ERR Authentication failed.')),
# PASS without user
(
('PASS bond', b'-ERR No username given.'),),
        # Try to run a TRANSACTION state command in AUTHORIZATION state
(
('RETR', b'-ERR Unknown command'),),
]
for sequence in login_sequences:
reader, writer = await asyncio.open_connection(
'127.0.0.1', 8888, loop=self.loop)
# skip banner
await reader.readline()
for pair in sequence:
writer.write(bytes(pair[0] + "\r\n", 'utf-8'))
response = await reader.readline()
self.assertEqual(response.rstrip(), pair[1])
options = {
'port': 110,
'protocol_specific_data': {
'banner': '+OK POP3 server ready',
'max_attempts': 3
},
'users': {
'james': 'bond'
}
}
sut = Pop3(options, self.loop)
server_coro = asyncio.start_server(
sut.handle_session, '0.0.0.0', 8888, loop=self.loop)
self.server = self.loop.run_until_complete(server_coro)
self.loop.run_until_complete(pop3_login())
| gpl-3.0 | 2,187,093,946,063,458,000 | 32 | 73 | 0.653926 | false |
unicri/edx-platform | lms/djangoapps/ccx/tests/test_utils.py | 6 | 22042 | """
test utils
"""
from nose.plugins.attrib import attr
from ccx.models import ( # pylint: disable=import-error
CcxMembership,
CcxFutureMembership,
)
from ccx.tests.factories import ( # pylint: disable=import-error
CcxFactory,
CcxMembershipFactory,
CcxFutureMembershipFactory,
)
from student.roles import CourseCcxCoachRole # pylint: disable=import-error
from student.tests.factories import ( # pylint: disable=import-error
AdminFactory,
UserFactory,
CourseEnrollmentFactory,
AnonymousUserFactory,
)
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr('shard_1')
class TestEmailEnrollmentState(ModuleStoreTestCase):
"""unit tests for the EmailEnrollmentState class
"""
def setUp(self):
"""
Set up tests
"""
super(TestEmailEnrollmentState, self).setUp()
# remove user provided by the parent test case so we can make our own
# when needed.
self.user = None
course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=course.id, coach=coach)
def create_user(self):
"""provide a legitimate django user for testing
"""
if getattr(self, 'user', None) is None:
self.user = UserFactory()
def register_user_in_ccx(self):
"""create registration of self.user in self.ccx
registration will be inactive
"""
self.create_user()
CcxMembershipFactory(ccx=self.ccx, student=self.user)
def create_one(self, email=None):
"""Create a single EmailEnrollmentState object and return it
"""
from ccx.utils import EmailEnrollmentState # pylint: disable=import-error
if email is None:
email = self.user.email
return EmailEnrollmentState(self.ccx, email)
def test_enrollment_state_for_non_user(self):
"""verify behavior for non-user email address
"""
ee_state = self.create_one(email='[email protected]')
for attr in ['user', 'member', 'full_name', 'in_ccx']:
value = getattr(ee_state, attr, 'missing attribute')
self.assertFalse(value, "{}: {}".format(value, attr))
def test_enrollment_state_for_non_member_user(self):
"""verify behavior for email address of user who is not a ccx memeber
"""
self.create_user()
ee_state = self.create_one()
self.assertTrue(ee_state.user)
self.assertFalse(ee_state.in_ccx)
self.assertEqual(ee_state.member, self.user)
self.assertEqual(ee_state.full_name, self.user.profile.name)
def test_enrollment_state_for_member_user(self):
"""verify behavior for email address of user who is a ccx member
"""
self.create_user()
self.register_user_in_ccx()
ee_state = self.create_one()
for attr in ['user', 'in_ccx']:
self.assertTrue(
getattr(ee_state, attr, False),
"attribute {} is missing or False".format(attr)
)
self.assertEqual(ee_state.member, self.user)
self.assertEqual(ee_state.full_name, self.user.profile.name)
def test_enrollment_state_to_dict(self):
"""verify dict representation of EmailEnrollmentState
"""
self.create_user()
self.register_user_in_ccx()
ee_state = self.create_one()
ee_dict = ee_state.to_dict()
expected = {
'user': True,
'member': self.user,
'in_ccx': True,
}
for expected_key, expected_value in expected.iteritems():
self.assertTrue(expected_key in ee_dict)
self.assertEqual(expected_value, ee_dict[expected_key])
def test_enrollment_state_repr(self):
self.create_user()
self.register_user_in_ccx()
ee_state = self.create_one()
representation = repr(ee_state)
self.assertTrue('user=True' in representation)
self.assertTrue('in_ccx=True' in representation)
member = 'member={}'.format(self.user)
self.assertTrue(member in representation)
@attr('shard_1')
# TODO: deal with changes in behavior for auto_enroll
class TestGetEmailParams(ModuleStoreTestCase):
"""tests for ccx.utils.get_email_params
"""
def setUp(self):
"""
Set up tests
"""
super(TestGetEmailParams, self).setUp()
course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=course.id, coach=coach)
self.all_keys = [
'site_name', 'course', 'course_url', 'registration_url',
'course_about_url', 'auto_enroll'
]
self.url_keys = [k for k in self.all_keys if 'url' in k]
self.course_keys = [k for k in self.url_keys if 'course' in k]
def call_fut(self, auto_enroll=False, secure=False):
"""
call function under test
"""
from ccx.utils import get_email_params # pylint: disable=import-error
return get_email_params(self.ccx, auto_enroll, secure)
def test_params_have_expected_keys(self):
params = self.call_fut()
self.assertFalse(set(params.keys()) - set(self.all_keys))
def test_ccx_id_in_params(self):
expected_course_id = self.ccx.course_id.to_deprecated_string()
params = self.call_fut()
self.assertEqual(params['course'], self.ccx)
for url_key in self.url_keys:
self.assertTrue('http://' in params[url_key])
for url_key in self.course_keys:
self.assertTrue(expected_course_id in params[url_key])
def test_security_respected(self):
secure = self.call_fut(secure=True)
for url_key in self.url_keys:
self.assertTrue('https://' in secure[url_key])
insecure = self.call_fut(secure=False)
for url_key in self.url_keys:
self.assertTrue('http://' in insecure[url_key])
def test_auto_enroll_passed_correctly(self):
not_auto = self.call_fut(auto_enroll=False)
self.assertFalse(not_auto['auto_enroll'])
auto = self.call_fut(auto_enroll=True)
self.assertTrue(auto['auto_enroll'])
@attr('shard_1')
# TODO: deal with changes in behavior for auto_enroll
class TestEnrollEmail(ModuleStoreTestCase):
"""tests for the enroll_email function from ccx.utils
"""
def setUp(self):
super(TestEnrollEmail, self).setUp()
# unbind the user created by the parent, so we can create our own when
# needed.
self.user = None
course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=course.id, coach=coach)
self.outbox = self.get_outbox()
def create_user(self):
"""provide a legitimate django user for testing
"""
if getattr(self, 'user', None) is None:
self.user = UserFactory()
def register_user_in_ccx(self):
"""create registration of self.user in self.ccx
registration will be inactive
"""
self.create_user()
CcxMembershipFactory(ccx=self.ccx, student=self.user)
def get_outbox(self):
"""Return the django mail outbox"""
from django.core import mail
return mail.outbox
def check_membership(self, email=None, user=None, future=False):
"""Verify tjat an appropriate CCX Membership exists"""
if not email and not user:
self.fail(
"must provide user or email address to check CCX Membership"
)
if future and email:
membership = CcxFutureMembership.objects.filter(
ccx=self.ccx, email=email
)
elif not future:
if not user:
user = self.user
membership = CcxMembership.objects.filter(
ccx=self.ccx, student=user
)
self.assertTrue(membership.exists())
def check_enrollment_state(self, state, in_ccx, member, user):
"""Verify an enrollment state object against provided arguments
state.in_ccx will always be a boolean
state.user will always be a boolean
state.member will be a Django user object or None
"""
self.assertEqual(in_ccx, state.in_ccx)
self.assertEqual(member, state.member)
self.assertEqual(user, state.user)
def call_fut(
self,
student_email=None,
auto_enroll=False,
email_students=False,
email_params=None
):
"""Call function under test"""
from ccx.utils import enroll_email # pylint: disable=import-error
if student_email is None:
student_email = self.user.email
before, after = enroll_email(
self.ccx, student_email, auto_enroll, email_students, email_params
)
return before, after
def test_enroll_non_user_sending_email(self):
"""enroll a non-user email and send an enrollment email to them
"""
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
test_email = "[email protected]"
before, after = self.call_fut(
student_email=test_email, email_students=True
)
# there should be a future membership set for this email address now
self.check_membership(email=test_email, future=True)
for state in [before, after]:
self.check_enrollment_state(state, False, None, False)
# mail was sent and to the right person
self.assertEqual(len(self.outbox), 1)
msg = self.outbox[0]
self.assertTrue(test_email in msg.recipients())
def test_enroll_non_member_sending_email(self):
"""register a non-member and send an enrollment email to them
"""
self.create_user()
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
before, after = self.call_fut(email_students=True)
# there should be a membership set for this email address now
self.check_membership(email=self.user.email)
self.check_enrollment_state(before, False, self.user, True)
self.check_enrollment_state(after, True, self.user, True)
# mail was sent and to the right person
self.assertEqual(len(self.outbox), 1)
msg = self.outbox[0]
self.assertTrue(self.user.email in msg.recipients())
def test_enroll_member_sending_email(self):
"""register a member and send an enrollment email to them
"""
self.register_user_in_ccx()
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
before, after = self.call_fut(email_students=True)
# there should be a membership set for this email address now
self.check_membership(email=self.user.email)
for state in [before, after]:
self.check_enrollment_state(state, True, self.user, True)
# mail was sent and to the right person
self.assertEqual(len(self.outbox), 1)
msg = self.outbox[0]
self.assertTrue(self.user.email in msg.recipients())
def test_enroll_non_user_no_email(self):
"""register a non-user via email address but send no email
"""
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
test_email = "[email protected]"
before, after = self.call_fut(
student_email=test_email, email_students=False
)
# there should be a future membership set for this email address now
self.check_membership(email=test_email, future=True)
for state in [before, after]:
self.check_enrollment_state(state, False, None, False)
# ensure there are still no emails in the outbox now
self.assertEqual(self.outbox, [])
def test_enroll_non_member_no_email(self):
"""register a non-member but send no email"""
self.create_user()
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
before, after = self.call_fut(email_students=False)
# there should be a membership set for this email address now
self.check_membership(email=self.user.email)
self.check_enrollment_state(before, False, self.user, True)
self.check_enrollment_state(after, True, self.user, True)
# ensure there are still no emails in the outbox now
self.assertEqual(self.outbox, [])
def test_enroll_member_no_email(self):
"""enroll a member but send no email
"""
self.register_user_in_ccx()
# ensure no emails are in the outbox now
self.assertEqual(self.outbox, [])
before, after = self.call_fut(email_students=False)
# there should be a membership set for this email address now
self.check_membership(email=self.user.email)
for state in [before, after]:
self.check_enrollment_state(state, True, self.user, True)
# ensure there are still no emails in the outbox now
self.assertEqual(self.outbox, [])
@attr('shard_1')
# TODO: deal with changes in behavior for auto_enroll
class TestUnenrollEmail(ModuleStoreTestCase):
"""Tests for the unenroll_email function from ccx.utils"""
def setUp(self):
super(TestUnenrollEmail, self).setUp()
# unbind the user created by the parent, so we can create our own when
# needed.
self.user = None
course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=course.id, coach=coach)
self.outbox = self.get_outbox()
self.email = "[email protected]"
def get_outbox(self):
"""Return the django mail outbox"""
from django.core import mail
return mail.outbox
def create_user(self):
"""provide a legitimate django user for testing
"""
if getattr(self, 'user', None) is None:
self.user = UserFactory()
def make_ccx_membership(self):
"""create registration of self.user in self.ccx
registration will be inactive
"""
self.create_user()
CcxMembershipFactory.create(ccx=self.ccx, student=self.user)
def make_ccx_future_membership(self):
"""create future registration for email in self.ccx"""
CcxFutureMembershipFactory.create(
ccx=self.ccx, email=self.email
)
def check_enrollment_state(self, state, in_ccx, member, user):
"""Verify an enrollment state object against provided arguments
state.in_ccx will always be a boolean
state.user will always be a boolean
state.member will be a Django user object or None
"""
self.assertEqual(in_ccx, state.in_ccx)
self.assertEqual(member, state.member)
self.assertEqual(user, state.user)
def check_membership(self, future=False):
"""
check membership
"""
if future:
membership = CcxFutureMembership.objects.filter(
ccx=self.ccx, email=self.email
)
else:
membership = CcxMembership.objects.filter(
ccx=self.ccx, student=self.user
)
return membership.exists()
def call_fut(self, email_students=False):
"""call function under test"""
from ccx.utils import unenroll_email # pylint: disable=import-error
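        # and/or idiom: use self.user.email when a test user exists, otherwise
        # fall back to the raw address stored in self.email.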
email = getattr(self, 'user', None) and self.user.email or self.email
return unenroll_email(self.ccx, email, email_students=email_students)
def test_unenroll_future_member_with_email(self):
"""unenroll a future member and send an email
"""
self.make_ccx_future_membership()
# assert that a membership exists and that no emails have been sent
self.assertTrue(self.check_membership(future=True))
self.assertEqual(self.outbox, [])
# unenroll the student
before, after = self.call_fut(email_students=True)
# assert that membership is now gone
self.assertFalse(self.check_membership(future=True))
# validate the before and after enrollment states
for state in [before, after]:
self.check_enrollment_state(state, False, None, False)
# check that mail was sent and to the right person
self.assertEqual(len(self.outbox), 1)
msg = self.outbox[0]
self.assertTrue(self.email in msg.recipients())
def test_unenroll_member_with_email(self):
"""unenroll a current member and send an email"""
self.make_ccx_membership()
# assert that a membership exists and that no emails have been sent
self.assertTrue(self.check_membership())
self.assertEqual(self.outbox, [])
# unenroll the student
before, after = self.call_fut(email_students=True)
# assert that membership is now gone
self.assertFalse(self.check_membership())
# validate the before and after enrollment state
self.check_enrollment_state(after, False, self.user, True)
self.check_enrollment_state(before, True, self.user, True)
# check that mail was sent and to the right person
self.assertEqual(len(self.outbox), 1)
msg = self.outbox[0]
self.assertTrue(self.user.email in msg.recipients())
def test_unenroll_future_member_no_email(self):
"""unenroll a future member but send no email
"""
self.make_ccx_future_membership()
# assert that a membership exists and that no emails have been sent
self.assertTrue(self.check_membership(future=True))
self.assertEqual(self.outbox, [])
# unenroll the student
before, after = self.call_fut()
# assert that membership is now gone
self.assertFalse(self.check_membership(future=True))
# validate the before and after enrollment states
for state in [before, after]:
self.check_enrollment_state(state, False, None, False)
# no email was sent to the student
self.assertEqual(self.outbox, [])
def test_unenroll_member_no_email(self):
"""unenroll a current member but send no email
"""
self.make_ccx_membership()
# assert that a membership exists and that no emails have been sent
self.assertTrue(self.check_membership())
self.assertEqual(self.outbox, [])
# unenroll the student
before, after = self.call_fut()
# assert that membership is now gone
self.assertFalse(self.check_membership())
# validate the before and after enrollment state
self.check_enrollment_state(after, False, self.user, True)
self.check_enrollment_state(before, True, self.user, True)
# no email was sent to the student
self.assertEqual(self.outbox, [])
@attr('shard_1')
class TestUserCCXList(ModuleStoreTestCase):
"""Unit tests for ccx.utils.get_all_ccx_for_user"""
def setUp(self):
"""Create required infrastructure for tests"""
super(TestUserCCXList, self).setUp()
self.course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(self.course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=self.course.id, coach=coach)
enrollment = CourseEnrollmentFactory.create(course_id=self.course.id)
self.user = enrollment.user
self.anonymous = AnonymousUserFactory.create()
def register_user_in_ccx(self, active=False):
"""create registration of self.user in self.ccx
registration will be inactive unless active=True
"""
CcxMembershipFactory(ccx=self.ccx, student=self.user, active=active)
def get_course_title(self):
"""Get course title"""
from courseware.courses import get_course_about_section # pylint: disable=import-error
return get_course_about_section(self.course, 'title')
def call_fut(self, user):
"""Call function under test"""
from ccx.utils import get_all_ccx_for_user # pylint: disable=import-error
return get_all_ccx_for_user(user)
def test_anonymous_sees_no_ccx(self):
memberships = self.call_fut(self.anonymous)
self.assertEqual(memberships, [])
def test_unenrolled_sees_no_ccx(self):
memberships = self.call_fut(self.user)
self.assertEqual(memberships, [])
def test_enrolled_inactive_sees_no_ccx(self):
self.register_user_in_ccx()
memberships = self.call_fut(self.user)
self.assertEqual(memberships, [])
def test_enrolled_sees_a_ccx(self):
self.register_user_in_ccx(active=True)
memberships = self.call_fut(self.user)
self.assertEqual(len(memberships), 1)
def test_data_structure(self):
self.register_user_in_ccx(active=True)
memberships = self.call_fut(self.user)
this_membership = memberships[0]
self.assertTrue(this_membership)
# structure contains the expected keys
for key in ['ccx_name', 'ccx_url']:
self.assertTrue(key in this_membership.keys())
url_parts = [self.course.id.to_deprecated_string(), str(self.ccx.id)]
# all parts of the ccx url are present
for part in url_parts:
self.assertTrue(part in this_membership['ccx_url'])
actual_name = self.ccx.display_name
self.assertEqual(actual_name, this_membership['ccx_name'])
| agpl-3.0 | -8,797,822,406,330,226,000 | 37.20104 | 95 | 0.630569 | false |
guewen/OpenUpgrade | addons/account/wizard/account_chart.py | 39 | 5159 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_chart(osv.osv_memory):
"""
For Chart of Accounts
"""
_name = "account.chart"
_description = "Account chart"
_columns = {
'fiscalyear': fields.many2one('account.fiscalyear', \
'Fiscal year', \
help='Keep empty for all open fiscal years'),
'period_from': fields.many2one('account.period', 'Start period'),
'period_to': fields.many2one('account.period', 'End period'),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', required=True),
}
def _get_fiscalyear(self, cr, uid, context=None):
"""Return default Fiscalyear value"""
return self.pool.get('account.fiscalyear').find(cr, uid, context=context)
def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
res = {}
if fiscalyear_id:
start_period = end_period = False
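            # Two single-row SELECTs joined by UNION ALL: the first yields the
            # earliest period of the fiscal year, the second the latest period
            # that has already started.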
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
ORDER BY p.date_start ASC
LIMIT 1) AS period_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
AND p.date_start < NOW()
ORDER BY p.date_stop DESC
LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
periods = [i[0] for i in cr.fetchall()]
if periods and len(periods) > 1:
start_period = periods[0]
end_period = periods[1]
res['value'] = {'period_from': start_period, 'period_to': end_period}
else:
res['value'] = {'period_from': False, 'period_to': False}
return res
def account_chart_open_window(self, cr, uid, ids, context=None):
"""
Opens chart of Accounts
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of account chart’s IDs
@return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
"""
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
period_obj = self.pool.get('account.period')
fy_obj = self.pool.get('account.fiscalyear')
if context is None:
context = {}
data = self.read(cr, uid, ids, [], context=context)[0]
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
fiscalyear_id = data.get('fiscalyear', False) and data['fiscalyear'][0] or False
result['periods'] = []
if data['period_from'] and data['period_to']:
period_from = data.get('period_from', False) and data['period_from'][0] or False
period_to = data.get('period_to', False) and data['period_to'][0] or False
result['periods'] = period_obj.build_ctx_periods(cr, uid, period_from, period_to)
result['context'] = str({'fiscalyear': fiscalyear_id, 'periods': result['periods'], \
'state': data['target_move']})
if fiscalyear_id:
result['name'] += ':' + fy_obj.read(cr, uid, [fiscalyear_id], context=context)[0]['code']
return result
_defaults = {
'target_move': 'posted',
'fiscalyear': _get_fiscalyear,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -9,189,987,383,739,027,000 | 46.293578 | 110 | 0.540834 | false |
PetrDlouhy/django | django/utils/module_loading.py | 30 | 6416 | import copy
import os
import sys
from importlib import import_module
from django.utils import six
def import_string(dotted_path):
"""
Import a dotted module path and return the attribute/class designated by the
last name in the path. Raise ImportError if the import failed.
"""
try:
module_path, class_name = dotted_path.rsplit('.', 1)
except ValueError:
msg = "%s doesn't look like a module path" % dotted_path
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError:
msg = 'Module "%s" does not define a "%s" attribute/class' % (
dotted_path, class_name)
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
def autodiscover_modules(*args, **kwargs):
"""
Auto-discover INSTALLED_APPS modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
You may provide a register_to keyword parameter as a way to access a
registry. This register_to object must have a _registry instance variable
to access it.
"""
from django.apps import apps
register_to = kwargs.get('register_to')
for app_config in apps.get_app_configs():
for module_to_search in args:
# Attempt to import the app's module.
try:
if register_to:
before_import_registry = copy.copy(register_to._registry)
import_module('%s.%s' % (app_config.name, module_to_search))
except:
# Reset the registry to the state before the last import
# as this import will have to reoccur on the next request and
# this could raise NotRegistered and AlreadyRegistered
# exceptions (see #8245).
if register_to:
register_to._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have the module in question, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(app_config.module, module_to_search):
raise
if sys.version_info[:2] >= (3, 3):
if sys.version_info[:2] >= (3, 4):
from importlib.util import find_spec as importlib_find
else:
from importlib import find_loader as importlib_find
def module_has_submodule(package, module_name):
"""See if 'module' is in 'package'."""
try:
package_name = package.__name__
package_path = package.__path__
except AttributeError:
# package isn't a package.
return False
full_module_name = package_name + '.' + module_name
return importlib_find(full_module_name, package_path) is not None
else:
import imp
def module_has_submodule(package, module_name):
"""See if 'module' is in 'package'."""
name = ".".join([package.__name__, module_name])
try:
# None indicates a cached miss; see mark_miss() in Python/import.c.
return sys.modules[name] is not None
except KeyError:
pass
try:
package_path = package.__path__ # No __path__, then not a package.
except AttributeError:
            # The remainder of this function assumes that we're dealing with
            # a package (module with a __path__), so if it's not, bail here.
return False
for finder in sys.meta_path:
if finder.find_module(name, package_path):
return True
for entry in package_path:
try:
# Try the cached finder.
finder = sys.path_importer_cache[entry]
if finder is None:
# Implicit import machinery should be used.
try:
file_, _, _ = imp.find_module(module_name, [entry])
if file_:
file_.close()
return True
except ImportError:
continue
# Else see if the finder knows of a loader.
elif finder.find_module(name):
return True
else:
continue
except KeyError:
# No cached finder, so try and make one.
for hook in sys.path_hooks:
try:
finder = hook(entry)
# XXX Could cache in sys.path_importer_cache
if finder.find_module(name):
return True
else:
# Once a finder is found, stop the search.
break
except ImportError:
# Continue the search for a finder.
continue
else:
# No finder found.
# Try the implicit import machinery if searching a directory.
if os.path.isdir(entry):
try:
file_, _, _ = imp.find_module(module_name, [entry])
if file_:
file_.close()
return True
except ImportError:
pass
# XXX Could insert None or NullImporter
else:
# Exhausted the search, so the module cannot be found.
return False
def module_dir(module):
"""
Find the name of the directory that contains a module, if possible.
Raise ValueError otherwise, e.g. for namespace packages that are split
over several directories.
"""
# Convert to list because _NamespacePath does not support indexing on 3.3.
paths = list(getattr(module, '__path__', []))
if len(paths) == 1:
return paths[0]
else:
filename = getattr(module, '__file__', None)
if filename is not None:
return os.path.dirname(filename)
raise ValueError("Cannot determine directory containing %s" % module)
| bsd-3-clause | 7,156,874,583,038,490,000 | 37.190476 | 82 | 0.539589 | false |
couchand/petard | vendor/cxxtest-4.3/test/test_doc.py | 54 | 1097 | #-------------------------------------------------------------------------
# CxxTest: A lightweight C++ unit testing library.
# Copyright (c) 2008 Sandia Corporation.
# This software is distributed under the LGPL License v3
# For more information, see the COPYING file in the top CxxTest directory.
# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
# the U.S. Government retains certain rights in this software.
#-------------------------------------------------------------------------
#
# Import and execute the Python test driver for the user guide examples
#
# Imports
try:
import pyutilib.th as unittest
pyutilib_available=True
except:
pyutilib_available=False
import os
from os.path import dirname, abspath, abspath, basename
import sys
if pyutilib_available:
currdir = dirname(abspath(__file__))+os.sep
datadir = os.sep.join([dirname(dirname(abspath(__file__))),'doc','examples'])+os.sep
os.chdir(datadir)
sys.path.insert(0, datadir)
from test_examples import *
# Execute the tests
if __name__ == '__main__' and pyutilib_available:
    unittest.main()
| mit | -2,477,275,262,920,126,500 | 30.342857 | 88 | 0.626253 | false |
pdellaert/ansible | test/units/modules/network/fortios/test_fortios_application_name.py | 21 | 11085 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_application_name
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_application_name.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
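# Each test below mocks FortiOSHandler.set/.delete, calls the module's
# fortios_application entry point, and checks both the payload sent to the
# device (note underscores become hyphens, e.g. sub_category -> sub-category)
# and the resulting (is_error, changed, response) tuple.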
def test_application_name_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'application_name': {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
expected_data = {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub-category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
}
set_method_mock.assert_called_with('application', 'name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_application_name_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'application_name': {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
expected_data = {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub-category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
}
set_method_mock.assert_called_with('application', 'name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_application_name_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'application_name': {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
delete_method_mock.assert_called_with('application', 'name', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_application_name_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'application_name': {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
delete_method_mock.assert_called_with('application', 'name', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_application_name_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'application_name': {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
expected_data = {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub-category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
}
set_method_mock.assert_called_with('application', 'name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_application_name_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'application_name': {
'random_attribute_not_valid': 'tag',
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub_category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
},
'vdom': 'root'}
is_error, changed, response = fortios_application_name.fortios_application(input_data, fos_instance)
expected_data = {
'behavior': 'test_value_3',
'category': '4',
'id': '5',
'name': 'default_name_6',
'parameter': 'test_value_7',
'popularity': '8',
'protocol': 'test_value_9',
'risk': '10',
'sub-category': '11',
'technology': 'test_value_12',
'vendor': 'test_value_13',
'weight': '14'
}
set_method_mock.assert_called_with('application', 'name', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | 2,285,379,125,501,775,600 | 34.873786 | 142 | 0.57465 | false |
100star/h2o | py/testdir_release/sh2junit.py | 31 | 16028 | import sys, psutil, os, stat, tempfile, argparse, time, datetime
sys.path.extend(['.','..','../..','py'])
import h2o_sandbox
# Stripped down, similar to h2o.py has for these functions
# Possible to do this in bash, but the code becomes cryptic.
# You can execute this as sh2junit.py <bash command string>
# sh2junit runs the cmd_string as a subprocess, with stdout/stderr going to files in sandbox
# and stdout to python stdout too.
# When it completes, check the sandbox for errors (using h2o_sandbox.py),
# print interesting things to stdout, and create the result xml in the current
# directory with name "test_<name>.nosetests.xml"
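# Example invocation (hypothetical command; any shell command string works):
#   python sh2junit.py -name rtest -timeout 300 -- Rscript some_test.R
# On completion this writes ./test_rtest.nosetests.xml and leaves the
# subprocess stdout/stderr logs in ./sandbox.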
def sandbox_tmp_file(prefix='', suffix=''):
# this gives absolute path, good!
dirname = './sandbox'
if not os.path.exists(dirname):
print "no ./sandbox. Creating"
os.makedirs(dirname)
fd, path = tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dirname)
# make sure the file now exists
# os.open(path, 'a').close()
# give everyone permission to read it (jenkins running as
# 0xcustomer needs to archive as jenkins
#permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
os.chmod(path, 0644) #'644') #permissions)
return (fd, path)
#**************************************************************************
# Example junit xml
#<?xml version="1.0" encoding="UTF-8"?>
#<testsuites disabled="" errors="" failures="" name="" tests="" time="">
# <testsuite disabled="" errors="" failures="" hostname="" id="" name="" package="" skipped="" tests="" time="" timestamp="">
# <properties>
# <property name="" value=""/>
# </properties>
# <testcase assertions="" classname="" name="" status="" time="">
# <skipped/>
# <error message="" type=""/>
# <failure message="" type=""/>
# <system-out/>
# <system-err/>
# </testcase>
# <system-out/>
# <system-err/>
# </testsuite>
#</testsuites>
def create_junit_xml(name, out, err, sandboxErrorMessage, errors=0, elapsed=0):
# http://junitpdfreport.sourceforge.net/managedcontent/PdfTranslation
# not really nosetests..just trying to mimic the python xml
content = '<?xml version="1.0" encoding="UTF-8" ?>\n'
content += ' <testsuite name="nosetests" tests="1" errors="%s" failures="0" skip="0">\n' % (errors)
content += ' <testcase classname="%s" name="%s" time="%0.4f">\n' % (name, name, elapsed)
if errors != 0 and not sandboxErrorMessage:
content += ' <error type="Non-zero R exit code" message="Non-zero R exit code"></error>\n'
    # may or may not be 2 errors (R exit code plus log error)
if errors != 0 and sandboxErrorMessage:
content += ' <error type="Error in h2o logs" message="Error in h2o logs"></error>\n'
content += ' <system-out>\n'
content += '<![CDATA[\n'
content += 'spawn stdout' + str(datetime.datetime.now()) + '**********************************************************\n'
content += out
content += ']]>\n'
content += ' </system-out>\n'
content += ' <system-err>\n'
content += '<![CDATA[\n'
content += 'spawn stderr' + str(datetime.datetime.now()) + '**********************************************************\n'
content += err
if sandboxErrorMessage:
content += 'spawn errors from sandbox log parsing*********************************\n'
# maybe could split this into a 2nd stdout or stder ..see above
content += sandboxErrorMessage
content += ']]>\n'
content += ' </system-err>\n'
content += ' </testcase>\n'
content += ' </testsuite>\n'
# see if adding nosetests makes michal's stuff pick it up??
# and "test_" prefix"
x = './test_' + os.path.basename(name) + '.nosetests.xml'
with open(x, 'wb') as f:
f.write(content)
#f = open(x, 'w')
#f.write(content)
#f.close()
#**************************************************************************
# belt and suspenders. Do we really need to worry about this?
def terminate_process_tree(pid, including_parent=True):
parent = psutil.Process(pid)
for child in parent.get_children(recursive=True):
try:
child.terminate()
except psutil.NoSuchProcess:
print "terminate_process_tree:", "NoSuchProcess. couldn't terminate child process with pid %s" % child.pid()
except psutil.AccessDenied:
print "terminate_process_tree:", "couldn't terminate child process with pid %s" % child.pid()
else:
child.wait(timeout=3)
if including_parent:
try:
parent.terminate()
except psutil.NoSuchProcess:
print "terminate_process_tree:", "NoSuchProcess. couldn't terminate parent process with pid %s" % parent.pid()
pass
except psutil.AccessDenied:
print "terminate_process_tree:", "AccessDenied. couldn't terminate parent process with pid %s" % parent.pid()
else:
parent.wait(timeout=3)
def terminate_child_processes():
me = os.getpid()
terminate_process_tree(me, including_parent=False)
#**************************************************************************
def rc_if_exists_and_done(ps):
try:
rc = ps.wait(0)
except psutil.TimeoutExpired:
# not sure why I'm getting this
print "Got TimeoutExpired on the R subprocess, may be legal"
rc = None
except psutil.NoSuchProcess:
raise Exception("The R subprocess disappeared when we thought it should still be there")
except psutil.AccessDenied:
raise Exception("The R subprocess gave us AccessDenied")
# rc = None means it already completed?
# FIX! Is it none if we get a timeout exception on this python ..how is that captured?
if rc:
# increment the global errors count if we get a non-zero rc. non-zero rc should only happen once?
error = 1
print "rc_if_exists_and_done: got non-zero rc: %s" % rc
else:
error = 0
return (rc, error)
#**************************************************************************
def sh2junit(name='NoName', cmd_string='/bin/ls', timeout=300, shdir=None, **kwargs):
# split by arbitrary strings of whitespace characters (space, tab, newline, return, formfeed)
print "cmd_string:", cmd_string
cmdList = cmd_string.split()
# these are absolute paths
outfd, outpath = sandbox_tmp_file(prefix=name + '.stdout.', suffix='.log')
errfd, errpath = sandbox_tmp_file(prefix=name + '.stderr.', suffix='.log')
# make outpath and errpath full paths, so we can redirect
print "outpath:", outpath
print "errpath:", errpath
start = time.time()
print "psutil.Popen:", cmdList, outpath, errpath
import subprocess
# start the process in the target dir, if desired
if shdir:
currentDir = os.getcwd()
os.chdir(shdir)
ps = psutil.Popen(cmdList, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
if shdir:
os.chdir(currentDir)
comment = 'PID %d, stdout %s, stderr %s' % (
ps.pid, os.path.basename(outpath), os.path.basename(errpath))
print "spawn_cmd", cmd_string, comment
# Reads the subprocess stdout until it is closed and
# ...echo it our python stdout and also the R stdout file in sandbox
# Then wait for the program to exit.
# Read before wait so that you don't risk the pipe filling up and hanging the program.
# You wait after read for the final program exit and return code.
# If you don't wait, you'll get a zombie process (at least on linux)
# this might not do what we want..see:
# http://stackoverflow.com/questions/2804543/read-subprocess-stdout-line-by-line
# I suppose we'll stop early?
# shouldn't need a delay before checking this?
if not ps.is_running():
raise Exception("sh2junit: not immediate ps.is_running after start")
# Until we get the rc, it can be a zombie process.
# A zombie process is not a real process.
# it's just a remaining entry in the process table until the parent process requests the child's return code.
# The actual process has ended and requires no other resources but said process table entry.
linesMayExist = True
errors = 0
timeoutError = False
while linesMayExist:
# get whatever accumulated, up to nothing returned
# only do up to 20 lines before we check timeout again
# why was R processes not completing on centos?
# linesMayExist = ps.is_running() and not ps.status() == psutil.STATUS_ZOMBIE
linesMayExist = ps.is_running()
lineBurstCnt = 0
# stdout from subprocess
line = ps.stdout.readline()
# R apparently uses stderr a lot, so want to mix that in. We don't grab it until we hit a stall in R stdout though.
while line:
lineBurstCnt += 1
# maybe I should use p.communicate() instead. have to keep it to stdout? or do stdout+stderr here
sys.stdout.write("R->" + line) # to our python stdout, with a prefix so it's obviously from R
sys.stdout.flush()
os.write(outfd, line) # to sandbox R stdout
elapsed = time.time() - start
if elapsed > timeout:
timeoutError = True
errors += 1
print "ERROR: sh2junit: elapsed: %0.2f timeout: %s (secs) while echoing subprocess stdout" % (elapsed, timeout)
#kill R subprocess but don't kill me
terminate_process_tree(ps.pid, including_parent=False)
break
line = ps.stdout.readline()
if timeoutError:
print "\n\n\nERROR: timeout"
break
# stderr from subprocess
line = ps.stderr.readline()
while line:
lineBurstCnt += 1
sys.stdout.write("Re->" + line) # to our python stdout, with a prefix so it's obviously from R stderr
sys.stdout.flush()
os.write(errfd, line) # to sandbox R stderr
line = ps.stderr.readline()
print "lineBurstCnt:", lineBurstCnt
# Check. may have flipped to not running, and we just got the last bit.
# shouldn't be a race on a transition here, if ps.wait(0) completion syncs the transition
if linesMayExist:
print "ps.is_running():", ps.is_running(), ps.pid, ps.name, ps.status, ps.create_time
# unload the return code without waiting..so we don't have a zombie!
(lastrc, error) = rc_if_exists_and_done(ps)
errors += error
elapsed = time.time() - start
# forever if timeout is None
#if timeout and elapsed > timeout:
if elapsed > timeout:
timeoutError = True
errors += 1
# we don't want to exception here, because we're going to print the xml that says there's an error
# I guess we'll end up terminating the R process down below
# could we have lines in stdout we didn't catch up on? maybe, but do we care?
print "ERROR: sh2junit: elapsed: %0.2f timeout: %s (secs) while echoing subprocess stdout" % (elapsed, timeout)
#kill R subprocess but don't kill me
#terminate_process_tree(ps.pid, including_parent=False)
break
# wait for some more output to accumulate
time.sleep(0.25)
# It shouldn't be running now?
# timeout=None waits forever. timeout=0 returns immediately.
# default above is 5 minutes
# Wait for process termination. Since child: return the exit code.
# If the process is already terminated does not raise NoSuchProcess exception
# but just return None immediately.
# If timeout is specified and process is still alive raises psutil.TimeoutExpired() exception.
# old
# rc = ps.wait(timeout)
(lastrc, error) = rc_if_exists_and_done(ps)
errors += error
elapsed = time.time() - start
# Prune h2o logs to interesting lines and detect errors.
# Error lines are returned. warning/info are printed to our (python stdout)
# so that's always printed/saved?
# None if no error
sandboxErrorMessage = h2o_sandbox.check_sandbox_for_errors(
LOG_DIR='./sandbox',
python_test_name=name,
cloudShutdownIsError=True,
sandboxIgnoreErrors=True) # don't take exception on error
if sandboxErrorMessage:
errors += 1
out = file(outpath).read()
err = file(errpath).read()
create_junit_xml(name, out, err, sandboxErrorMessage, errors=errors, elapsed=elapsed)
if not errors:
return (errors, outpath, errpath)
else:
# dump all the info as part of the exception? maybe too much
# is this bad to do in all cases? do we need it?
hline = "\n===========================================BEGIN DUMP=============================================================\n"
hhline = "\n===========================================END DUMP=============================================================\n"
out = '[stdout->err]: '.join(out.splitlines(True))
err = '[sterr->err]: '.join(err.splitlines(True))
if ps.is_running():
print "Before terminate:", ps.pid, ps.is_running()
terminate_process_tree(ps.pid, including_parent=True)
if sandboxErrorMessage:
print "\n\n\nError in Sandbox. Ending test. Dumping sub-process output.\n"
print hline
raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\tErrors found in ./sandbox log files?.\nR stdout:\n%s\n\nR stderr:\n%s\n%s" %
(name, cmd_string, lastrc, errors, out, err, hhline))
# could have already terminated?
elif timeoutError:
print "\n\n\nTimeout Error. Ending test. Dumping sub-process output.\n"
print hline
raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\ttimed out after %d secs. \nR stdout:\n%s\n\nR stderr:\n%s\n%s" %
(name, cmd_string, lastrc, errors, timeout or 0, out, err, hhline))
else:
print "\n\n\nCaught exception. Ending test. Dumping sub-process output.\n"
print hline
raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\tLikely non-zero exit code from R.\nR stdout:\n%s\n\nR stderr:\n%s\n%s" %
(name, cmd_string, lastrc, errors, out, err, hhline))
#**************************************************************************
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-shdir', type=str, default=None, help='executes the $cmd in the target dir, but the logs stay in sandbox here')
parser.add_argument('-name', type=str, default='NoName', help='used to help name the xml/stdout/stderr logs created')
parser.add_argument('-timeout', type=int, default=5, help='secs timeout for the shell subprocess. Fail if timeout')
parser.add_argument('-cmd', '--cmd_string', type=str, default=None, help="cmd string to pass to shell subprocess. Better to just use'--' to start the cmd (everything after that is sucked in)")
parser.add_argument('Rargs', nargs=argparse.REMAINDER)
args = parser.parse_args()
if args.cmd_string:
cmd_string = args.cmd_string
else:
# easiest way to handle multiple tokens for command
# end with -- and this grabs the rest
# drop the leading '--' if we stopped parsing the rest that way
if args.Rargs:
print "args.Rargs:", args.Rargs
if args.Rargs[0]=='--':
args.Rargs[0] = ''
cmd_string = ' '.join(args.Rargs)
else:
# placeholder for test
cmd_string = '/bin/ls'
sh2junit(name=args.name, cmd_string=cmd_string, timeout=args.timeout, shdir=args.shdir)
| apache-2.0 | 4,169,509,417,171,697,700 | 45.457971 | 196 | 0.594522 | false |
soumyanishan/azure-linux-extensions | OSPatching/azure/storage/storageclient.py | 51 | 5800 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os
import sys
from azure import (
WindowsAzureError,
DEV_ACCOUNT_NAME,
DEV_ACCOUNT_KEY,
_ERROR_STORAGE_MISSING_INFO,
)
from azure.http import HTTPError
from azure.http.httpclient import _HTTPClient
from azure.storage import _storage_error_handler
#--------------------------------------------------------------------------
# constants for azure app setting environment variables
AZURE_STORAGE_ACCOUNT = 'AZURE_STORAGE_ACCOUNT'
AZURE_STORAGE_ACCESS_KEY = 'AZURE_STORAGE_ACCESS_KEY'
EMULATED = 'EMULATED'
#--------------------------------------------------------------------------
class _StorageClient(object):
'''
This is the base class for BlobManager, TableManager and QueueManager.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base='', dev_host=''):
'''
account_name: your storage account name, required for all operations.
account_key: your storage account key, required for all operations.
        protocol: Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host: Optional. Dev host url. Defaults to localhost.
'''
self.account_name = account_name
self.account_key = account_key
self.requestid = None
self.protocol = protocol
self.host_base = host_base
self.dev_host = dev_host
        # Whether to use the local development storage account; set to True
        # below when the app is running in the emulator.
        self.use_local_storage = False
# check whether it is run in emulator.
if EMULATED in os.environ:
self.is_emulated = os.environ[EMULATED].lower() != 'false'
else:
self.is_emulated = False
# get account_name and account key. If they are not set when
# constructing, get the account and key from environment variables if
# the app is not run in azure emulator or use default development
# storage account and key if app is run in emulator.
if not self.account_name or not self.account_key:
if self.is_emulated:
self.account_name = DEV_ACCOUNT_NAME
self.account_key = DEV_ACCOUNT_KEY
self.protocol = 'http'
self.use_local_storage = True
else:
self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT)
self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY)
if not self.account_name or not self.account_key:
raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO)
self._httpclient = _HTTPClient(
service_instance=self,
account_key=self.account_key,
account_name=self.account_name,
protocol=self.protocol)
self._batchclient = None
self._filter = self._perform_request_worker
def with_filter(self, filter):
'''
Returns a new service which will process requests with the specified
filter. Filtering operations can include logging, automatic retrying,
etc... The filter is a lambda which receives the HTTPRequest and
another lambda. The filter can perform any pre-processing on the
request, pass it off to the next lambda, and then perform any
post-processing on the response.
'''
res = type(self)(self.account_name, self.account_key, self.protocol)
old_filter = self._filter
def new_filter(request):
return filter(request, old_filter)
res._filter = new_filter
return res
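    # Illustrative filter (sketch; request field names assumed on HTTPRequest):
    #
    #   def log_filter(request, next_filter):
    #       print('%s %s' % (request.method, request.path))
    #       return next_filter(request)
    #
    #   logged_service = service.with_filter(log_filter)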
def set_proxy(self, host, port, user=None, password=None):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host: Address of the proxy. Ex: '192.168.0.100'
port: Port of the proxy. Ex: 6000
user: User for proxy authorization.
password: Password for proxy authorization.
'''
self._httpclient.set_proxy(host, port, user, password)
def _get_host(self):
if self.use_local_storage:
return self.dev_host
else:
return self.account_name + self.host_base
def _perform_request_worker(self, request):
return self._httpclient.perform_request(request)
def _perform_request(self, request, text_encoding='utf-8'):
'''
Sends the request and return response. Catches HTTPError and hand it
to error handler
'''
try:
if self._batchclient is not None:
return self._batchclient.insert_request_to_batch(request)
else:
resp = self._filter(request)
if sys.version_info >= (3,) and isinstance(resp, bytes) and \
text_encoding:
resp = resp.decode(text_encoding)
except HTTPError as ex:
_storage_error_handler(ex)
return resp
| apache-2.0 | 5,136,530,396,367,353,000 | 37.157895 | 78 | 0.604483 | false |
yatinkumbhare/openstack-nova | nova/tests/functional/v3/api_paste_fixture.py | 25 | 1543 | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
from oslo_config import cfg
from nova import paths
CONF = cfg.CONF
class ApiPasteFixture(fixtures.Fixture):
def setUp(self):
super(ApiPasteFixture, self).setUp()
CONF.set_default('api_paste_config',
paths.state_path_def('etc/nova/api-paste.ini'))
tmp_api_paste_dir = self.useFixture(fixtures.TempDir())
tmp_api_paste_file_name = os.path.join(tmp_api_paste_dir.path,
'fake_api_paste.ini')
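        # Copy the stock paste config, rewriting the /v2 pipeline to point at
        # the v2.1 API so requests against /v2 exercise the new code path.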
with open(CONF.api_paste_config, 'r') as orig_api_paste:
with open(tmp_api_paste_file_name, 'w') as tmp_file:
for line in orig_api_paste:
tmp_file.write(line.replace(
"/v2: openstack_compute_api_v2",
"/v2: openstack_compute_api_v21"))
CONF.set_override('api_paste_config', tmp_api_paste_file_name)
| apache-2.0 | -3,270,179,196,829,480,000 | 36.634146 | 75 | 0.645496 | false |
rbprogrammer/advanced_python_topics | course-material/py2/solutions/07 XML Processing/Ex7.1.py | 1 | 1036 | #!/usr/local/bin/python2
# Ex7.1.py Python 2 version
# Script to parse an XML file and enumerate tags
import sys
from xml.parsers import expat
# Allow user to provide a filename, or default to books.xml
filename = sys.argv[1] if sys.argv[1:] else 'books.xml'
Tags = 0
tags = {}
def start_tag(name, attr):
global Tags, tags
Tags += 1
if name in tags:
tags[name] += 1
else:
tags[name] = 1
# The following line does the same as the if/else above
# tags.get() does not raise an exception if the default value (zero here)
# is supplied.
# tags[name] = 1 + tags.get(name, 0)
ExParser = expat.ParserCreate()
ExParser.StartElementHandler = start_tag
try:
ExParser.ParseFile(open(filename, 'rb'))
except expat.ExpatError as err:
    print >> sys.stderr, "XML parse error:", err
exit(1)
else:
for k, v in tags.items():
print k.ljust(15), ":", v
print "XML is well-formed and has ", \
"%s tags of which %d are unique" % (Tags, len(tags.keys()))
| apache-2.0 | -2,609,655,144,962,733,600 | 21.521739 | 73 | 0.639961 | false |
marcellfischbach/CobaltSKY | Scripts/Blender/valkyrie/geometry.py | 1 | 1365 |
import valkyrie
import struct
class GeometryWriter:
GT_GeometryMesh = 0
GT_GeometryCollection = 1
GT_GeometryLOD = 2
DM_Internal = 0
DM_Exteral = 1
def __init__(self):
self.stream = []
self.material_map = {}
def write(self, multi_mesh):
self.stream += struct.pack('<I', GeometryWriter.GT_GeometryMesh)
self.stream += struct.pack('<ffff', 1.0, 0.0, 0.0, 0.0);
self.stream += struct.pack('<ffff', 0.0, 1.0, 0.0, 0.0);
self.stream += struct.pack('<ffff', 0.0, 0.0, 1.0, 0.0);
self.stream += struct.pack('<ffff', 0.0, 0.0, 0.0, 1.0);
self.stream += struct.pack('<I', GeometryWriter.DM_Internal)
self._write_string('Mesh')
# export the materials that are stored within this static mesh
self._prepare_material_map(multi_mesh)
self._write_material_names()
def _prepare_material_map(self, multi_mesh):
i = 0
for mesh_data in multi_mesh.mesh_datas:
mat_name = mesh_data.material_name
      if mat_name not in self.material_map:
        self.material_map[mat_name] = i
        i += 1
def _write_material_names(self):
self.stream += struct.pack('<I', len(self.material_map))
for material_name in self.material_map:
self._write_string(material_name)
def _write_string(self, string):
_string = bytes(string, 'latin1')
self.stream += struct.pack("<I%dsb" % (len(_string)), len(_string)+1, _string, 0)
| gpl-2.0 | -89,460,289,486,703,550 | 23.392857 | 83 | 0.649084 | false |
LIKAIMO/MissionPlanner | LogAnalyzer/tests/TestIMUMatch.py | 61 | 3781 | from LogAnalyzer import Test,TestResult
import DataflashLog
from math import sqrt
class TestIMUMatch(Test):
'''test for empty or near-empty logs'''
def __init__(self):
Test.__init__(self)
self.name = "IMU Mismatch"
def run(self, logdata, verbose):
#tuning parameters:
warn_threshold = .75
fail_threshold = 1.5
filter_tc = 5.0
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
if ("IMU" in logdata.channels) and (not "IMU2" in logdata.channels):
self.result.status = TestResult.StatusType.NA
self.result.statusMessage = "No IMU2"
return
if (not "IMU" in logdata.channels) or (not "IMU2" in logdata.channels):
self.result.status = TestResult.StatusType.UNKNOWN
self.result.statusMessage = "No IMU log data"
return
imu1 = logdata.channels["IMU"]
imu2 = logdata.channels["IMU2"]
imu1_timems = imu1["TimeMS"].listData
imu1_accx = imu1["AccX"].listData
imu1_accy = imu1["AccY"].listData
imu1_accz = imu1["AccZ"].listData
imu2_timems = imu2["TimeMS"].listData
imu2_accx = imu2["AccX"].listData
imu2_accy = imu2["AccY"].listData
imu2_accz = imu2["AccZ"].listData
imu1 = []
imu2 = []
for i in range(len(imu1_timems)):
imu1.append({ 't': imu1_timems[i][1]*1.0E-3, 'x': imu1_accx[i][1], 'y': imu1_accy[i][1], 'z': imu1_accz[i][1]})
for i in range(len(imu2_timems)):
imu2.append({ 't': imu2_timems[i][1]*1.0E-3, 'x': imu2_accx[i][1], 'y': imu2_accy[i][1], 'z': imu2_accz[i][1]})
imu1.sort(key=lambda x: x['t'])
imu2.sort(key=lambda x: x['t'])
imu2_index = 0
last_t = None
xdiff_filtered = 0
ydiff_filtered = 0
zdiff_filtered = 0
max_diff_filtered = 0
for i in range(len(imu1)):
#find closest imu2 value
t = imu1[i]['t']
dt = 0 if last_t is None else t-last_t
dt=min(dt,.1)
next_imu2 = None
            for j in range(imu2_index, len(imu2)):
                next_imu2 = imu2[j]
                imu2_index = j
if next_imu2['t'] >= t:
break
prev_imu2 = imu2[imu2_index-1]
closest_imu2 = next_imu2 if abs(next_imu2['t']-t)<abs(prev_imu2['t']-t) else prev_imu2
xdiff = imu1[i]['x']-closest_imu2['x']
ydiff = imu1[i]['y']-closest_imu2['y']
zdiff = imu1[i]['z']-closest_imu2['z']
            # first-order low-pass filter (time constant filter_tc) so brief spikes do not trip the thresholds
            xdiff_filtered += (xdiff - xdiff_filtered) * dt / filter_tc
            ydiff_filtered += (ydiff - ydiff_filtered) * dt / filter_tc
            zdiff_filtered += (zdiff - zdiff_filtered) * dt / filter_tc
            diff_filtered = sqrt(xdiff_filtered ** 2 + ydiff_filtered ** 2 + zdiff_filtered ** 2)
max_diff_filtered = max(max_diff_filtered,diff_filtered)
#print max_diff_filtered
last_t = t
if max_diff_filtered > fail_threshold:
self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
self.result.status = TestResult.StatusType.FAIL
elif max_diff_filtered > warn_threshold:
self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
self.result.status = TestResult.StatusType.WARN
else:
self.result.statusMessage = "(Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold, fail_threshold)
| gpl-3.0 | -4,354,047,947,307,644,000 | 35.009524 | 180 | 0.565988 | false |
lwahlmeier/python-threadly | threadly/Futures.py | 2 | 4445 | """
Futures tools for threadly
"""
import threading
import time
class ListenableFuture(object):
"""
    This class is used to make a Future that can have listeners and callbacks
added to it. Once setter(object) is called all listeners/callbacks are
also called. Callbacks will be given the set object, and .get() will
return said object.
"""
def __init__(self):
self.lock = threading.Condition()
self.settable = None
self.listeners = list()
self.callables = list()
def add_listener(self, listener, args=None, kwargs=None):
"""
Add a listener function to this ListenableFuture. Once set is called
on this future all listeners will be ran. Arguments for the listener
can be given if needed.
`listener` a callable that will be called when the future is completed
`args` tuple arguments that will be passed to the listener when called.
`kwargs` dict keyword arguments to be passed to the passed listener
when called.
"""
args = args or ()
kwargs = kwargs or {}
if self.settable is None:
self.listeners.append((listener, args, kwargs))
else:
listener(*args, **kwargs)
def add_callable(self, cable, args=None, kwargs=None):
"""
Add a callable function to this ListenableFuture. Once set is called
on this future all callables will be ran. This works the same as the
listener except the set object is passed as the first argument when
the callable is called. Arguments for the listener can be given if
needed.
`cable` a callable that will be called when the future is completed,
it must have at least 1 argument.
`args` tuple arguments that will be passed to the listener when called.
`kwargs` dict keyword arguments to be passed to the passed listener
when called.
"""
args = args or ()
kwargs = kwargs or {}
if self.settable is None:
self.callables.append((cable, args, kwargs))
else:
cable(self.settable, *args, **kwargs)
def get(self, timeout=2 ** 32):
"""
This is a blocking call that will return the set object once it is set.
`timeout` The max amount of time to wait for get (in seconds).
            If the timeout is reached, None is returned.
        `returns` the set object. This can technically be anything so know
            what you're listening for.
"""
if self.settable is not None:
return self.settable
start = time.time()
try:
self.lock.acquire()
while self.settable is None and time.time() - start < timeout:
self.lock.wait(timeout - (time.time() - start))
return self.settable
finally:
self.lock.release()
def setter(self, obj):
"""
This is used to complete this future. Whatever thread sets this will
be used to call all listeners and callables for this future.
`obj` The object you want to set on this future
(usually use just True if you dont care)
"""
if self.settable is None:
self.settable = obj
self.lock.acquire()
self.lock.notify_all()
self.lock.release()
while len(self.listeners) > 0:
i = self.listeners.pop(0)
try:
i[0](*i[1], **i[2])
except Exception as exp:
print("Exception calling listener", i[0], exp)
while len(self.callables) > 0:
i = self.callables.pop(0)
try:
i[0](self.settable, *i[1], **i[2])
except Exception as exp:
print("Exception calling listener", i[0], exp)
else:
raise Exception("Already Set!")
def future_job(future, job):
"""
This is a simple helper function used to wrap a task on the Scheduler
in a future. Once the job runs the future will complete.
`future` The future that will be completed once the job finishes.
`job` The job to run before completing the future.
"""
try:
job[0](*job[1], **job[2])
future.setter(True)
except Exception as exp:
print("Error running futureJob:", exp)
future.setter(False)
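
# A small usage sketch (illustrative, not part of the library): one thread
# blocks in get() while another completes the future; the callable added
# beforehand receives the value that setter() was given.
if __name__ == '__main__':
    def show(value):
        print('callable got: %s' % value)

    fut = ListenableFuture()
    fut.add_callable(show)

    def complete_later():
        time.sleep(0.1)
        fut.setter(42)

    worker = threading.Thread(target=complete_later)
    worker.start()
    print('get() returned: %s' % fut.get(timeout=5))
    worker.join()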
| unlicense | 4,335,050,251,291,903,000 | 34.56 | 79 | 0.589876 | false |
ftrader-bitcoinabc/bitcoin-abc | test/functional/abc-finalize-block.py | 1 | 13578 | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the finalizeblock RPC calls."""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
set_node_times,
wait_until,
)
RPC_FINALIZE_INVALID_BLOCK_ERROR = 'finalize-invalid-block'
RPC_FORK_PRIOR_FINALIZED_ERROR = 'bad-fork-prior-finalized'
RPC_BLOCK_NOT_FOUND_ERROR = 'Block not found'
class FinalizeBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [["-finalizationdelay=0"],
["-finalizationdelay=0"], []]
self.finalization_delay = 2 * 60 * 60
def run_test(self):
node = self.nodes[0]
self.mocktime = int(time.time())
self.log.info("Test block finalization...")
node.generatetoaddress(10, node.get_deterministic_priv_key().address)
tip = node.getbestblockhash()
node.finalizeblock(tip)
assert_equal(node.getbestblockhash(), tip)
assert_equal(node.getfinalizedblockhash(), tip)
def wait_for_tip(node, tip):
def check_tip():
return node.getbestblockhash() == tip
wait_until(check_tip)
alt_node = self.nodes[1]
wait_for_tip(alt_node, tip)
alt_node.invalidateblock(tip)
# We will use this later
fork_block = alt_node.getbestblockhash()
# Node 0 should not accept the whole alt_node's chain due to tip being finalized,
# even though it is longer.
# Headers would not be accepted if previousblock is invalid:
        # - First block from alt node has the same height as the node tip, but is on a minority chain. Its
# status is "valid-headers"
# - Second block from alt node has height > node tip height, will be marked as invalid because
# node tip is finalized
# - Later blocks from alt node will be rejected because their previous block are invalid
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(218 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
#
# On node:
# >(210 valid-headers)->(211 invalid)->(212 to 218 dropped)
# /
# (200)->(201)-> // ->(209)->(210 finalized, tip)
def wait_for_block(node, block, status="invalid"):
def check_block():
for tip in node.getchaintips():
if tip["hash"] == block:
assert tip["status"] != "active"
return tip["status"] == status
return False
wait_until(check_block)
# First block header is accepted as valid-header
alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)
wait_for_block(node, alt_node.getbestblockhash(), "valid-headers")
# Second block header is accepted but set invalid
alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)
invalid_block = alt_node.getbestblockhash()
wait_for_block(node, invalid_block)
# Later block headers are rejected
for i in range(2, 9):
alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)
assert_raises_rpc_error(-5, RPC_BLOCK_NOT_FOUND_ERROR,
node.getblockheader, alt_node.getbestblockhash())
assert_equal(node.getbestblockhash(), tip)
assert_equal(node.getfinalizedblockhash(), tip)
self.log.info("Test that an invalid block cannot be finalized...")
assert_raises_rpc_error(-20, RPC_FINALIZE_INVALID_BLOCK_ERROR,
node.finalizeblock, invalid_block)
self.log.info(
"Test that invalidating a finalized block moves the finalization backward...")
# Node's finalized block will be invalidated, which causes the finalized block to
# move to the previous block.
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(218 tip)
# /
# (200)->(201)-> // ->(208 auto-finalized)->(209)->(210 invalid)
#
# On node:
# >(210 valid-headers)->(211 invalid)->(212 to 218 dropped)
# /
# (200)->(201)-> // ->(209 finalized)->(210 tip)
node.invalidateblock(tip)
node.reconsiderblock(tip)
assert_equal(node.getbestblockhash(), tip)
assert_equal(node.getfinalizedblockhash(), fork_block)
assert_equal(alt_node.getfinalizedblockhash(), node.getblockheader(
node.getfinalizedblockhash())['previousblockhash'])
# The node will now accept that chain as the finalized block moved back.
# Generate a new block on alt_node to trigger getheader from node
        # Previous blocks at heights 212-218 have been dropped because their previous block was invalid
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(218)->(219 tip)
# /
# (200)->(201)-> // ->(209 auto-finalized)->(210 invalid)
#
# On node:
# >(210)->(211)->(212)-> // ->(218)->(219 tip)
# /
# (200)->(201)-> // ->(209 finalized)->(210)
node.reconsiderblock(invalid_block)
alt_node_tip = alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)[-1]
wait_for_tip(node, alt_node_tip)
assert_equal(node.getbestblockhash(), alt_node.getbestblockhash())
assert_equal(node.getfinalizedblockhash(), fork_block)
assert_equal(alt_node.getfinalizedblockhash(), fork_block)
self.log.info("Trigger reorg via block finalization...")
# Finalize node tip to reorg
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(218)->(219 tip)
# /
# (200)->(201)-> // ->(209 auto-finalized)->(210 invalid)
#
# On node:
# >(210 invalid)-> // ->(219 invalid)
# /
# (200)->(201)-> // ->(209)->(210 finalized, tip)
node.finalizeblock(tip)
assert_equal(node.getfinalizedblockhash(), tip)
self.log.info("Try to finalize a block on a competiting fork...")
assert_raises_rpc_error(-20, RPC_FINALIZE_INVALID_BLOCK_ERROR,
node.finalizeblock, alt_node.getbestblockhash())
assert_equal(node.getfinalizedblockhash(), tip)
self.log.info(
"Check auto-finalization occurs as the tip move forward...")
# Reconsider alt_node tip then generate some more blocks on alt_node.
# Auto-finalization will occur on both chains.
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
#
# On node:
# >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
node.reconsiderblock(alt_node.getbestblockhash())
block_to_autofinalize = alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)[-1]
alt_node_new_tip = alt_node.generatetoaddress(
9, alt_node.get_deterministic_priv_key().address)[-1]
wait_for_tip(node, alt_node_new_tip)
assert_equal(node.getbestblockhash(), alt_node.getbestblockhash())
assert_equal(node.getfinalizedblockhash(), alt_node_tip)
assert_equal(alt_node.getfinalizedblockhash(), alt_node_tip)
self.log.info(
"Try to finalize a block on an already finalized chain...")
# Finalizing a block of an already finalized chain should have no
# effect
block_218 = node.getblockheader(alt_node_tip)['previousblockhash']
node.finalizeblock(block_218)
assert_equal(node.getfinalizedblockhash(), alt_node_tip)
self.log.info(
"Make sure reconsidering block move the finalization point...")
# Reconsidering the tip will move back the finalized block on node
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
#
# On node:
# >(210)->(211)-> // ->(219)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209 finalized)->(210)
node.reconsiderblock(tip)
assert_equal(node.getbestblockhash(), alt_node_new_tip)
assert_equal(node.getfinalizedblockhash(), fork_block)
# TEST FINALIZATION DELAY
self.log.info("Check that finalization delay prevents eclipse attacks")
# Because there has been no delay since the beginning of this test,
# there should have been no auto-finalization on delay_node.
#
# Expected state:
#
# On alt_node:
# >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
#
# On delay_node:
# >(210)->(211)-> // ->(219)-> // ->(229 tip)
# /
# (200)->(201)-> // ->(209)->(210)
delay_node = self.nodes[2]
wait_for_tip(delay_node, alt_node_new_tip)
assert_equal(delay_node.getfinalizedblockhash(), str())
self.log.info(
"Check that finalization delay does not prevent auto-finalization")
# Expire the delay, then generate 1 new block with alt_node to
# update the tip on all chains.
# Because the finalization delay is expired, auto-finalization
# should occur.
#
# Expected state:
#
# On alt_node:
# >(220 auto-finalized)-> // ->(230 tip)
# /
# (200)->(201)-> // ->(209)->(210 invalid)
#
# On delay_node:
# >(220 auto-finalized)-> // ->(230 tip)
# /
# (200)->(201)-> // ->(209)->(210)
self.mocktime += self.finalization_delay
set_node_times([delay_node], self.mocktime)
new_tip = alt_node.generatetoaddress(
1, alt_node.get_deterministic_priv_key().address)[-1]
wait_for_tip(delay_node, new_tip)
assert_equal(alt_node.getbestblockhash(), new_tip)
assert_equal(node.getfinalizedblockhash(), block_to_autofinalize)
assert_equal(alt_node.getfinalizedblockhash(), block_to_autofinalize)
self.log.info(
"Check that finalization delay is effective on node boot")
# Restart the new node, so the blocks have no header received time.
self.restart_node(2)
# There should be no finalized block (getfinalizedblockhash returns an
# empty string)
assert_equal(delay_node.getfinalizedblockhash(), str())
# Generate 20 blocks with no delay. This should not trigger auto-finalization.
#
# Expected state:
#
# On delay_node:
# >(220)-> // ->(250 tip)
# /
# (200)->(201)-> // ->(209)->(210)
blocks = delay_node.generatetoaddress(
20, alt_node.get_deterministic_priv_key().address)
reboot_autofinalized_block = blocks[10]
new_tip = blocks[-1]
wait_for_tip(delay_node, new_tip)
assert_equal(delay_node.getfinalizedblockhash(), str())
# Now let the finalization delay to expire, then generate one more block.
# This should resume auto-finalization.
#
# Expected state:
#
# On delay_node:
# >(220)-> // ->(241 auto-finalized)-> // ->(251 tip)
# /
# (200)->(201)-> // ->(209)->(210)
self.mocktime += self.finalization_delay
set_node_times([delay_node], self.mocktime)
new_tip = delay_node.generatetoaddress(
1, delay_node.get_deterministic_priv_key().address)[-1]
wait_for_tip(delay_node, new_tip)
assert_equal(delay_node.getfinalizedblockhash(),
reboot_autofinalized_block)
if __name__ == '__main__':
FinalizeBlockTest().main()
| mit | -5,691,586,649,551,708,000 | 40.145455 | 105 | 0.523494 | false |
jcoady9/python-for-android | python-modules/twisted/twisted/words/protocols/jabber/jid.py | 54 | 7167 | # -*- test-case-name: twisted.words.test.test_jabberjid -*-
#
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Jabber Identifier support.
This module provides an object to represent Jabber Identifiers (JIDs) and
parse string representations into them with proper checking for illegal
characters, case folding and canonicalisation through L{stringprep<twisted.words.protocols.jabber.xmpp_stringprep>}.
"""
from twisted.words.protocols.jabber.xmpp_stringprep import nodeprep, resourceprep, nameprep
class InvalidFormat(Exception):
"""
The given string could not be parsed into a valid Jabber Identifier (JID).
"""
def parse(jidstring):
"""
Parse given JID string into its respective parts and apply stringprep.
@param jidstring: string representation of a JID.
@type jidstring: C{unicode}
@return: tuple of (user, host, resource), each of type C{unicode} as
the parsed and stringprep'd parts of the given JID. If the
given string did not have a user or resource part, the respective
field in the tuple will hold C{None}.
@rtype: C{tuple}
"""
user = None
host = None
resource = None
# Search for delimiters
user_sep = jidstring.find("@")
res_sep = jidstring.find("/")
if user_sep == -1:
if res_sep == -1:
# host
host = jidstring
else:
# host/resource
host = jidstring[0:res_sep]
resource = jidstring[res_sep + 1:] or None
else:
if res_sep == -1:
# user@host
user = jidstring[0:user_sep] or None
host = jidstring[user_sep + 1:]
else:
if user_sep < res_sep:
# user@host/resource
user = jidstring[0:user_sep] or None
host = jidstring[user_sep + 1:user_sep + (res_sep - user_sep)]
resource = jidstring[res_sep + 1:] or None
else:
# host/resource (with an @ in resource)
host = jidstring[0:res_sep]
resource = jidstring[res_sep + 1:] or None
return prep(user, host, resource)
def prep(user, host, resource):
"""
Perform stringprep on all JID fragments.
@param user: The user part of the JID.
@type user: C{unicode}
@param host: The host part of the JID.
@type host: C{unicode}
@param resource: The resource part of the JID.
@type resource: C{unicode}
@return: The given parts with stringprep applied.
@rtype: C{tuple}
"""
if user:
try:
user = nodeprep.prepare(unicode(user))
except UnicodeError:
raise InvalidFormat, "Invalid character in username"
else:
user = None
if not host:
raise InvalidFormat, "Server address required."
else:
try:
host = nameprep.prepare(unicode(host))
except UnicodeError:
raise InvalidFormat, "Invalid character in hostname"
if resource:
try:
resource = resourceprep.prepare(unicode(resource))
except UnicodeError:
raise InvalidFormat, "Invalid character in resource"
else:
resource = None
return (user, host, resource)
__internJIDs = {}
def internJID(jidstring):
"""
Return interned JID.
@rtype: L{JID}
"""
if jidstring in __internJIDs:
return __internJIDs[jidstring]
else:
j = JID(jidstring)
__internJIDs[jidstring] = j
return j
class JID(object):
"""
Represents a stringprep'd Jabber ID.
JID objects are hashable so they can be used in sets and as keys in
dictionaries.
"""
def __init__(self, str=None, tuple=None):
if not (str or tuple):
raise RuntimeError("You must provide a value for either 'str' or "
"'tuple' arguments.")
if str:
user, host, res = parse(str)
else:
user, host, res = prep(*tuple)
self.user = user
self.host = host
self.resource = res
def userhost(self):
"""
Extract the bare JID as a unicode string.
A bare JID does not have a resource part, so this returns either
C{user@host} or just C{host}.
@rtype: C{unicode}
"""
if self.user:
return u"%s@%s" % (self.user, self.host)
else:
return self.host
def userhostJID(self):
"""
Extract the bare JID.
A bare JID does not have a resource part, so this returns a
L{JID} object representing either C{user@host} or just C{host}.
If the object this method is called upon doesn't have a resource
set, it will return itself. Otherwise, the bare JID object will
be created, interned using L{internJID}.
@rtype: L{JID}
"""
if self.resource:
return internJID(self.userhost())
else:
return self
def full(self):
"""
Return the string representation of this JID.
@rtype: C{unicode}
"""
if self.user:
if self.resource:
return u"%s@%s/%s" % (self.user, self.host, self.resource)
else:
return u"%s@%s" % (self.user, self.host)
else:
if self.resource:
return u"%s/%s" % (self.host, self.resource)
else:
return self.host
def __eq__(self, other):
"""
Equality comparison.
L{JID}s compare equal if their user, host and resource parts all
compare equal. When comparing against instances of other types, it
uses the default comparison.
"""
if isinstance(other, JID):
return (self.user == other.user and
self.host == other.host and
self.resource == other.resource)
else:
return NotImplemented
def __ne__(self, other):
"""
Inequality comparison.
This negates L{__eq__} for comparison with JIDs and uses the default
comparison for other types.
"""
result = self.__eq__(other)
if result is NotImplemented:
return result
else:
return not result
def __hash__(self):
"""
Calculate hash.
L{JID}s with identical constituent user, host and resource parts have
equal hash values. In combination with the comparison defined on JIDs,
this allows for using L{JID}s in sets and as dictionary keys.
"""
return hash((self.user, self.host, self.resource))
def __unicode__(self):
"""
Get unicode representation.
Return the string representation of this JID as a unicode string.
@see: L{full}
"""
return self.full()
def __repr__(self):
"""
Get object representation.
Returns a string that would create a new JID object that compares equal
to this one.
"""
return 'JID(%r)' % self.full()
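
# A brief usage sketch (not part of the module; assumes the stringprep
# dependencies imported above are available): parsing applies stringprep, so
# the user and host parts are case-folded while the resource keeps its case.
if __name__ == '__main__':
    j = JID(u'[email protected]/Work')
    print(j.full())                                     # user@example.com/Work
    print(j.userhostJID() == JID(u'user@example.com'))  # True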
| apache-2.0 | 569,233,239,388,996,000 | 27.783133 | 116 | 0.572345 | false |
HyperBaton/ansible | lib/ansible/plugins/doc_fragments/nxos.py | 44 | 5396 | # -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Sprygada <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = r'''
options:
provider:
description:
- B(Deprecated)
- "Starting with Ansible 2.5 we recommend using C(connection: network_cli)."
- This option is only required if you are using NX-API.
- For more information please see the L(NXOS Platform Options guide, ../network/user_guide/platform_nxos.html).
- HORIZONTALLINE
- A dict object containing connection details.
type: dict
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
type: str
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device. This value applies to either I(cli) or I(nxapi). The port
value will default to the appropriate transport common port if
none is provided in the task. (cli=22, http=80, https=443).
type: int
default: 0 (use common port)
username:
description:
- Configures the username to use to authenticate the connection to
the remote device. This value is used to authenticate
either the CLI login or the nxapi authentication depending on which
transport is used. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
type: str
password:
description:
- Specifies the password to use to authenticate the connection to
the remote device. This is a common argument used for either I(cli)
or I(nxapi) transports. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
type: str
authorize:
description:
- Instructs the module to enter privileged mode on the remote device
before sending any commands. If not specified, the device will
attempt to execute all commands in non-privileged mode. If the value
is not specified in the task, the value of environment variable
C(ANSIBLE_NET_AUTHORIZE) will be used instead.
type: bool
default: no
version_added: '2.5.3'
auth_pass:
description:
- Specifies the password to use if required to enter privileged mode
on the remote device. If I(authorize) is false, then this argument
does nothing. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead.
type: str
version_added: '2.5.3'
timeout:
description:
- Specifies the timeout in seconds for communicating with the network device
for either connecting or sending commands. If the timeout is
exceeded before the operation is completed, the module will error.
NX-API can be slow to return on long-running commands (sh mac, sh bgp, etc).
type: int
default: 10
version_added: '2.3'
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to
the remote device. This argument is only used for the I(cli)
transport. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead.
type: str
transport:
description:
- Configures the transport connection to use when connecting to the
remote device. The transport argument supports connectivity to the
device over cli (ssh) or nxapi.
type: str
required: true
choices: [ cli, nxapi ]
default: cli
use_ssl:
description:
- Configures the I(transport) to use SSL if set to C(yes) only when the
C(transport=nxapi), otherwise this value is ignored.
type: bool
default: no
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates. If the transport
argument is not nxapi, this value is ignored.
type: bool
default: yes
use_proxy:
description:
- If C(no), the environment variables C(http_proxy) and C(https_proxy) will be ignored.
type: bool
default: yes
version_added: "2.5"
notes:
- For information on using CLI and NX-API see the :ref:`NXOS Platform Options guide <nxos_platform_options>`
- For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>`
- For more information on using Ansible to manage Cisco devices see the `Cisco integration page <https://www.ansible.com/integrations/networks/cisco>`_.
'''
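
# Illustrative only (not part of the fragment): a task consuming the deprecated
# 'provider' dict documented above could look like the following playbook
# snippet; the host and credential values are placeholders.
#
# - nxos_command:
#     commands: show version
#     provider:
#       host: "{{ inventory_hostname }}"
#       username: admin
#       password: "{{ nxos_password }}"
#       transport: nxapi
#       use_ssl: yes
#       validate_certs: no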
| gpl-3.0 | -7,850,867,660,753,214,000 | 44.344538 | 154 | 0.645849 | false |
evensonbryan/yocto-autobuilder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/status/progress.py | 4 | 11969 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import reactor
from twisted.spread import pb
from twisted.python import log
from buildbot import util
from collections import defaultdict
class StepProgress:
"""I keep track of how much progress a single BuildStep has made.
Progress is measured along various axes. Time consumed is one that is
available for all steps. Amount of command output is another, and may be
better quantified by scanning the output for markers to derive number of
files compiled, directories walked, tests run, etc.
I am created when the build begins, and given to a BuildProgress object
so it can track the overall progress of the whole build.
"""
startTime = None
stopTime = None
expectedTime = None
buildProgress = None
debug = False
def __init__(self, name, metricNames):
self.name = name
self.progress = {}
self.expectations = {}
for m in metricNames:
self.progress[m] = None
self.expectations[m] = None
def setBuildProgress(self, bp):
self.buildProgress = bp
def setExpectations(self, metrics):
"""The step can call this to explicitly set a target value for one
of its metrics. E.g., ShellCommands knows how many commands it will
execute, so it could set the 'commands' expectation."""
for metric, value in metrics.items():
self.expectations[metric] = value
self.buildProgress.newExpectations()
def setExpectedTime(self, seconds):
self.expectedTime = seconds
self.buildProgress.newExpectations()
def start(self):
if self.debug: print "StepProgress.start[%s]" % self.name
self.startTime = util.now()
def setProgress(self, metric, value):
"""The step calls this as progress is made along various axes."""
if self.debug:
print "setProgress[%s][%s] = %s" % (self.name, metric, value)
self.progress[metric] = value
if self.debug:
r = self.remaining()
print " step remaining:", r
self.buildProgress.newProgress()
def finish(self):
"""This stops the 'time' metric and marks the step as finished
overall. It should be called after the last .setProgress has been
done for each axis."""
if self.debug: print "StepProgress.finish[%s]" % self.name
self.stopTime = util.now()
self.buildProgress.stepFinished(self.name)
def totalTime(self):
if self.startTime != None and self.stopTime != None:
return self.stopTime - self.startTime
def remaining(self):
if self.startTime == None:
return self.expectedTime
if self.stopTime != None:
return 0 # already finished
# TODO: replace this with cleverness that graphs each metric vs.
# time, then finds the inverse function. Will probably need to save
# a timestamp with each setProgress update, when finished, go back
# and find the 2% transition points, then save those 50 values in a
# list. On the next build, do linear interpolation between the two
# closest samples to come up with a percentage represented by that
# metric.
# TODO: If no other metrics are available, just go with elapsed
# time. Given the non-time-uniformity of text output from most
# steps, this would probably be better than the text-percentage
# scheme currently implemented.
percentages = []
for metric, value in self.progress.items():
expectation = self.expectations[metric]
if value != None and expectation != None:
p = 1.0 * value / expectation
percentages.append(p)
if percentages:
avg = reduce(lambda x,y: x+y, percentages) / len(percentages)
if avg > 1.0:
# overdue
avg = 1.0
if avg < 0.0:
avg = 0.0
if percentages and self.expectedTime != None:
return self.expectedTime - (avg * self.expectedTime)
if self.expectedTime is not None:
# fall back to pure time
return self.expectedTime - (util.now() - self.startTime)
return None # no idea
class WatcherState:
def __init__(self, interval):
self.interval = interval
self.timer = None
self.needUpdate = 0
class BuildProgress(pb.Referenceable):
"""I keep track of overall build progress. I hold a list of StepProgress
objects.
"""
def __init__(self, stepProgresses):
self.steps = {}
for s in stepProgresses:
self.steps[s.name] = s
s.setBuildProgress(self)
self.finishedSteps = []
self.watchers = {}
self.debug = 0
def setExpectationsFrom(self, exp):
"""Set our expectations from the builder's Expectations object."""
for name, metrics in exp.steps.items():
s = self.steps.get(name)
if s:
s.setExpectedTime(exp.times[name])
s.setExpectations(exp.steps[name])
def newExpectations(self):
"""Call this when one of the steps has changed its expectations.
This should trigger us to update our ETA value and notify any
subscribers."""
pass # subscribers are not implemented: they just poll
def stepFinished(self, stepname):
assert(stepname not in self.finishedSteps)
self.finishedSteps.append(stepname)
if len(self.finishedSteps) == len(self.steps.keys()):
self.sendLastUpdates()
def newProgress(self):
r = self.remaining()
if self.debug:
print " remaining:", r
if r != None:
self.sendAllUpdates()
def remaining(self):
# sum eta of all steps
sum = 0
for name, step in self.steps.items():
rem = step.remaining()
if rem == None:
return None # not sure
sum += rem
return sum
def eta(self):
left = self.remaining()
if left == None:
return None # not sure
done = util.now() + left
return done
def remote_subscribe(self, remote, interval=5):
# [interval, timer, needUpdate]
# don't send an update more than once per interval
self.watchers[remote] = WatcherState(interval)
remote.notifyOnDisconnect(self.removeWatcher)
self.updateWatcher(remote)
self.startTimer(remote)
log.msg("BuildProgress.remote_subscribe(%s)" % remote)
def remote_unsubscribe(self, remote):
        # TODO: this doesn't work. I think 'remote' will always be different
        # from the object that appeared in _subscribe.
log.msg("BuildProgress.remote_unsubscribe(%s)" % remote)
self.removeWatcher(remote)
#remote.dontNotifyOnDisconnect(self.removeWatcher)
def removeWatcher(self, remote):
#log.msg("removeWatcher(%s)" % remote)
try:
timer = self.watchers[remote].timer
if timer:
timer.cancel()
del self.watchers[remote]
except KeyError:
log.msg("Weird, removeWatcher on non-existent subscriber:",
remote)
def sendAllUpdates(self):
for r in self.watchers.keys():
self.updateWatcher(r)
def updateWatcher(self, remote):
# an update wants to go to this watcher. Send it if we can, otherwise
# queue it for later
w = self.watchers[remote]
if not w.timer:
# no timer, so send update now and start the timer
self.sendUpdate(remote)
self.startTimer(remote)
else:
# timer is running, just mark as needing an update
w.needUpdate = 1
def startTimer(self, remote):
w = self.watchers[remote]
timer = reactor.callLater(w.interval, self.watcherTimeout, remote)
w.timer = timer
def sendUpdate(self, remote, last=0):
self.watchers[remote].needUpdate = 0
#text = self.asText() # TODO: not text, duh
try:
remote.callRemote("progress", self.remaining())
if last:
remote.callRemote("finished", self)
except:
log.deferr()
self.removeWatcher(remote)
def watcherTimeout(self, remote):
w = self.watchers.get(remote, None)
if not w:
return # went away
w.timer = None
if w.needUpdate:
self.sendUpdate(remote)
self.startTimer(remote)
def sendLastUpdates(self):
for remote in self.watchers.keys():
self.sendUpdate(remote, 1)
self.removeWatcher(remote)
class Expectations:
debug = False
# decay=1.0 ignores all but the last build
# 0.9 is short time constant. 0.1 is very long time constant
# TODO: let decay be specified per-metric
decay = 0.5
def __init__(self, buildprogress):
"""Create us from a successful build. We will expect each step to
take as long as it did in that build."""
# .steps maps stepname to dict2
# dict2 maps metricname to final end-of-step value
self.steps = defaultdict(dict)
# .times maps stepname to per-step elapsed time
self.times = {}
for name, step in buildprogress.steps.items():
self.steps[name] = {}
for metric, value in step.progress.items():
self.steps[name][metric] = value
self.times[name] = None
if step.startTime is not None and step.stopTime is not None:
self.times[name] = step.stopTime - step.startTime
def wavg(self, old, current):
if old is None:
return current
if current is None:
return old
else:
return (current * self.decay) + (old * (1 - self.decay))
def update(self, buildprogress):
for name, stepprogress in buildprogress.steps.items():
old = self.times.get(name)
current = stepprogress.totalTime()
if current == None:
log.msg("Expectations.update: current[%s] was None!" % name)
continue
new = self.wavg(old, current)
self.times[name] = new
if self.debug:
print "new expected time[%s] = %s, old %s, cur %s" % \
(name, new, old, current)
for metric, current in stepprogress.progress.items():
old = self.steps[name].get(metric)
new = self.wavg(old, current)
if self.debug:
print "new expectation[%s][%s] = %s, old %s, cur %s" % \
(name, metric, new, old, current)
self.steps[name][metric] = new
def expectedBuildTime(self):
if None in self.times.values():
return None
#return sum(self.times.values())
# python-2.2 doesn't have 'sum'. TODO: drop python-2.2 support
s = 0
for v in self.times.values():
s += v
return s
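
# Worked example of the decayed average above (illustrative, not part of
# buildbot): with decay = 0.5 each new sample pulls the estimate halfway
# toward it, which is how per-step time expectations adapt between builds.
if __name__ == '__main__':
    estimate = None
    for sample in (100.0, 120.0, 80.0):
        # mirrors Expectations.wavg with decay = 0.5
        estimate = sample if estimate is None else (sample * 0.5) + (estimate * 0.5)
        print "estimate after sample %.0f: %.1f" % (sample, estimate)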
| gpl-2.0 | -3,542,692,039,136,569,000 | 35.941358 | 79 | 0.601053 | false |
dxj19831029/keras | tests/manual/check_constraints.py | 86 | 2841 | from __future__ import absolute_import
from __future__ import print_function
import keras
from keras.datasets import mnist
import keras.models
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.regularizers import l2, l1
from keras.constraints import maxnorm, nonneg
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils, generic_utils
import theano
import theano.tensor as T
import numpy as np
import scipy.stats  # 'import scipy' alone does not reliably expose scipy.stats
batch_size = 100
nb_classes = 10
nb_epoch = 10
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(784, 20, W_constraint=maxnorm(1)))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 20, W_constraint=nonneg()))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 10, W_constraint=maxnorm(1)))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0)
# params[0] is the kernel of the first Dense layer, constrained by maxnorm(1)
a = model.params[0].eval()
if np.isclose(np.max(np.sqrt(np.sum(a ** 2, axis=0))), 1):
    print('Maxnorm test passed')
else:
    raise ValueError('Maxnorm test failed!')
# params[2] is the kernel of the second Dense layer, constrained by nonneg()
b = model.params[2].eval()
if np.min(b) == 0 and np.min(a) != 0:
    print('Nonneg test passed')
else:
    raise ValueError('Nonneg test failed!')
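
# Quick numeric illustration (not part of the original script): maxnorm(1)
# corresponds to rescaling any kernel column whose L2 norm exceeds 1 back to
# unit norm, which is what the np.isclose check above verifies on the
# trained weights.
w = np.random.randn(784, 20) * 3.0
norms = np.sqrt((w ** 2).sum(axis=0))
w /= np.maximum(1.0, norms)
assert np.sqrt((w ** 2).sum(axis=0)).max() <= 1.0 + 1e-6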
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l1(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)
a = model.params[2].eval().reshape(400)
(D, p1) = scipy.stats.kurtosistest(a)
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l2(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)
a = model.params[2].eval().reshape(400)
(D, p2) = scipy.stats.kurtosistest(a)

# L1 should leave a sparse, heavy-tailed (leptokurtic) weight distribution, so
# the normality test rejects (small p1); L2 keeps weights roughly Gaussian (large p2).
if p1 < .01 and p2 > .01:
    print('L1 and L2 regularization tests passed')
else:
    raise ValueError('L1 and L2 regularization tests failed!')
| mit | 2,906,159,712,180,073,500 | 27.42 | 100 | 0.734952 | false
archesproject/arches | arches/app/models/migrations/0001_initial.py | 1 | 36390 | # -*- coding: utf-8 -*-
import os
import uuid
import codecs
import django.contrib.gis.db.models.fields
from django.core import management
from django.contrib.postgres.fields import JSONField
from django.db import migrations, models
from arches.db.migration_operations.extras import CreateExtension, CreateAutoPopulateUUIDField, CreateFunction
from arches.app.models.system_settings import settings
def get_sql_string_from_file(pathtofile):
ret = []
with codecs.open(pathtofile, encoding="utf-8") as f:
ret = f.read()
# print sqlparse.split(sqlparse.format(ret,strip_comments=True))
# for stmt in sqlparse.split(sqlparse.format(f.read(),strip_comments=True)):
# if stmt.strip() != '':
# ret.append(stmt)
return ret
def forwards_func(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
pass
def reverse_func(apps, schema_editor):
Ontology = apps.get_model("models", "Ontology")
Ontology.objects.filter(version="6.2").delete()
# a workaround for not being able to create permissions during an initial migration
# from https://code.djangoproject.com/ticket/23422#comment:6
def make_permissions(apps, schema_editor, with_create_permissions=True):
db_alias = schema_editor.connection.alias
Group = apps.get_model("auth", "Group")
User = apps.get_model("auth", "User")
Permission = apps.get_model("auth", "Permission")
try:
read_nodegroup = Permission.objects.using(db_alias).get(
codename="read_nodegroup", content_type__app_label="models", content_type__model="nodegroup"
)
write_nodegroup = Permission.objects.using(db_alias).get(
codename="write_nodegroup", content_type__app_label="models", content_type__model="nodegroup"
)
delete_nodegroup = Permission.objects.using(db_alias).get(
codename="delete_nodegroup", content_type__app_label="models", content_type__model="nodegroup"
)
except Permission.DoesNotExist:
if with_create_permissions:
# Manually run create_permissions
from django.contrib.auth.management import create_permissions
assert not getattr(apps, "models_module", None)
model_app = apps.get_app_config("models")
model_app.models_module = True
create_permissions(model_app, verbosity=0)
model_app.models_module = None
return make_permissions(apps, schema_editor, with_create_permissions=False)
else:
raise
graph_editor_group = Group.objects.using(db_alias).create(name="Graph Editor")
graph_editor_group.permissions.add(read_nodegroup, write_nodegroup, delete_nodegroup)
resource_editor_group = Group.objects.using(db_alias).create(name="Resource Editor")
rdm_admin_group = Group.objects.using(db_alias).create(name="RDM Administrator")
app_admin_group = Group.objects.using(db_alias).create(name="Application Administrator")
sys_admin_group = Group.objects.using(db_alias).create(name="System Administrator")
mobile_project_admin_group = Group.objects.using(db_alias).create(name="Mobile Project Administrator")
crowdsource_editor_group = Group.objects.using(db_alias).create(name="Crowdsource Editor")
guest_group = Group.objects.using(db_alias).create(name="Guest")
anonymous_user = User.objects.using(db_alias).get(username="anonymous")
anonymous_user.groups.add(guest_group)
admin_user = User.objects.using(db_alias).get(username="admin")
admin_user.groups.add(graph_editor_group)
admin_user.groups.add(resource_editor_group)
admin_user.groups.add(rdm_admin_group)
admin_user.groups.add(app_admin_group)
admin_user.groups.add(sys_admin_group)
admin_user.groups.add(mobile_project_admin_group)
admin_user.groups.add(crowdsource_editor_group)
admin_user.groups.add(guest_group)
class Migration(migrations.Migration):
dependencies = []
initial = True
operations = [
CreateExtension(name="uuid-ossp"),
CreateFunction(
name="insert_relation",
arguments=["p_label text", "p_relationtype text", "p_legacyid2 text"],
declarations=["v_conceptidfrom uuid = null;", "v_conceptidto uuid = null;"],
language="plpgsql",
body="""
v_conceptidfrom =
(select conceptid from concepts c
where trim(legacyoid) = trim(p_legacyid1));
v_conceptidto = (select conceptid from concepts c
where trim(legacyoid) = trim(p_legacyid2));
IF v_conceptidfrom is not null and v_conceptidto is not null and
v_conceptidto <> v_conceptidfrom and
v_conceptidfrom::text||v_conceptidto::text NOT IN (SELECT conceptidfrom::text||conceptidto::text FROM relations) then
INSERT INTO relations(relationid, conceptidfrom, conceptidto, relationtype) VALUES (uuid_generate_v1mc(), v_conceptidfrom, v_conceptidto, p_relationtype);
return 'success!';
ELSE return 'fail! no relation inserted.';
END IF;
""",
returntype="text",
),
CreateFunction(
name="get_conceptid",
arguments=["p_label text"],
declarations=["v_return text;",],
language="plpgsql",
body="""
v_return =
(select a.conceptid from concepts a, values b
where 1=1 and
b.valuetype = 'prefLabel' and
b.value = p_label and
b.conceptid = a.conceptid LIMIT 1);
return v_return;
""",
returntype="uuid",
),
CreateFunction(
name="insert_concept",
arguments=["p_label text", "p_note text", "p_languageid text", "p_legacyid text", "p_nodetype text"],
declarations=[
"v_conceptid uuid = public.uuid_generate_v1mc();",
"v_valueid uuid = public.uuid_generate_v1mc();",
"v_languageid text = p_languageid;",
],
language="plpgsql",
body="""
INSERT INTO concepts(conceptid, nodetype, legacyoid) VALUES (v_conceptid, p_nodetype, p_legacyid);
IF trim(p_label) is not null and p_label<>'' then
INSERT INTO values (valueid, conceptid, valuetype, value, languageid)
VALUES (v_valueid, v_conceptid, 'prefLabel', trim(initcap(p_label)), v_languageid);
END IF;
IF trim(p_note) is not null and p_note <> '' then
INSERT INTO values (valueid, conceptid, valuetype, value, languageid)
                -- use a fresh valueid so a concept with both a label and a note does not collide on the primary key
                VALUES (public.uuid_generate_v1mc(), v_conceptid, 'scopeNote', p_note, v_languageid);
END IF;
return v_conceptid;
""",
returntype="uuid",
),
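        # Illustrative only: these PL/pgSQL helpers are intended for legacy
        # concept-loading SQL scripts, e.g.
        #   SELECT insert_concept('Limestone', 'a sedimentary rock', 'en-US', 'legacy-0001', 'Concept');
        #   SELECT insert_relation('legacy-0001', 'narrower', 'legacy-0002');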
migrations.CreateModel(
name="GraphModel",
fields=[
("graphid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)),
("name", models.TextField(null=True, blank=True)),
("description", models.TextField(null=True, blank=True)),
("deploymentfile", models.TextField(null=True, blank=True)),
("author", models.TextField(null=True, blank=True)),
("deploymentdate", models.DateTimeField(null=True, blank=True)),
("version", models.TextField(null=True, blank=True)),
("isresource", models.BooleanField()),
("isactive", models.BooleanField()),
("iconclass", models.TextField(null=True, blank=True)),
("mapfeaturecolor", models.TextField(blank=True, null=True)),
("maplinewidth", models.IntegerField(blank=True, null=True)),
("mappointsize", models.IntegerField(blank=True, null=True)),
("subtitle", models.TextField(null=True, blank=True)),
],
options={"db_table": "graphs", "managed": True,},
),
migrations.CreateModel(name="Graph", fields=[], options={"proxy": True,}, bases=("models.GraphModel",),),
migrations.CreateModel(
name="CardModel",
fields=[
("cardid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)),
("name", models.TextField(null=True, blank=True)),
("description", models.TextField(null=True, blank=True)),
("instructions", models.TextField(null=True, blank=True)),
("helpenabled", models.BooleanField(default=False)),
("helptitle", models.TextField(null=True, blank=True)),
("helptext", models.TextField(null=True, blank=True)),
("active", models.BooleanField(default=True)),
("visible", models.BooleanField(default=True)),
("sortorder", models.IntegerField(blank=True, null=True, default=None)),
],
options={"db_table": "cards", "managed": True,},
),
migrations.CreateModel(name="Card", fields=[], options={"proxy": True,}, bases=("models.CardModel",),),
migrations.CreateModel(
name="CardXNodeXWidget",
fields=[
("card", models.ForeignKey(to="models.CardModel", db_column="cardid", on_delete=models.CASCADE)),
("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("config", JSONField(blank=True, db_column="config", null=True)),
("label", models.TextField(blank=True, null=True)),
("sortorder", models.IntegerField(blank=True, null=True, default=None)),
],
options={"db_table": "cards_x_nodes_x_widgets", "managed": True,},
),
migrations.CreateModel(
name="Concept",
fields=[
("conceptid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("legacyoid", models.TextField(unique=True)),
],
options={"db_table": "concepts", "managed": True,},
),
migrations.CreateModel(
name="DDataType",
fields=[
("datatype", models.TextField(primary_key=True, serialize=False)),
("iconclass", models.TextField()),
("modulename", models.TextField(blank=True, null=True)),
("classname", models.TextField(blank=True, null=True)),
("configcomponent", models.TextField(blank=True, null=True)),
("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)),
("configname", models.TextField(blank=True, null=True)),
("isgeometric", models.BooleanField(default=False)),
],
options={"db_table": "d_data_types", "managed": True,},
),
migrations.CreateModel(
name="DLanguage",
fields=[
("languageid", models.TextField(primary_key=True, serialize=False)),
("languagename", models.TextField()),
("isdefault", models.BooleanField()),
],
options={"db_table": "d_languages", "managed": True,},
),
migrations.CreateModel(
name="DNodeType",
fields=[("nodetype", models.TextField(primary_key=True, serialize=False)), ("namespace", models.TextField()),],
options={"db_table": "d_node_types", "managed": True,},
),
migrations.CreateModel(
name="DRelationType",
fields=[
("relationtype", models.TextField(primary_key=True, serialize=False)),
("category", models.TextField()),
("namespace", models.TextField()),
],
options={"db_table": "d_relation_types", "managed": True,},
),
migrations.CreateModel(
name="DValueType",
fields=[
("valuetype", models.TextField(primary_key=True, serialize=False)),
("category", models.TextField(blank=True, null=True)),
("description", models.TextField(blank=True, null=True)),
("namespace", models.TextField()),
("datatype", models.TextField(blank=True, null=True)),
],
options={"db_table": "d_value_types", "managed": True,},
),
migrations.CreateModel(
name="Edge",
fields=[
("edgeid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField(blank=True, null=True)),
("description", models.TextField(blank=True, null=True)),
("ontologyproperty", models.TextField(blank=True, null=True)),
(
"graph",
models.ForeignKey(blank=False, db_column="graphid", null=False, to="models.GraphModel", on_delete=models.CASCADE),
),
],
options={"db_table": "edges", "managed": True,},
),
migrations.CreateModel(
name="EditLog",
fields=[
("editlogid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)),
("resourceclassid", models.TextField(null=True, blank=True)),
("resourceinstanceid", models.TextField(null=True, blank=True)),
("attributenodeid", models.TextField(null=True, blank=True)),
("tileinstanceid", models.TextField(null=True, blank=True)),
("edittype", models.TextField(null=True, blank=True)),
("newvalue", models.TextField(null=True, blank=True)),
("oldvalue", models.TextField(null=True, blank=True)),
("timestamp", models.DateTimeField(null=True, blank=True)),
("userid", models.TextField(null=True, blank=True)),
("user_firstname", models.TextField(null=True, blank=True)),
("user_lastname", models.TextField(null=True, blank=True)),
("user_email", models.TextField(null=True, blank=True)),
("note", models.TextField(null=True, blank=True)),
],
options={"db_table": "edit_log", "managed": True,},
),
migrations.CreateModel(
name="File",
fields=[
("fileid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("path", models.FileField(upload_to="uploadedfiles")),
],
options={"db_table": "files", "managed": True,},
),
migrations.CreateModel(
name="Form",
fields=[
("formid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("title", models.TextField(blank=True, null=True)),
("subtitle", models.TextField(blank=True, null=True)),
("iconclass", models.TextField(blank=True, null=True)),
("visible", models.BooleanField(default=True)),
("sortorder", models.IntegerField(blank=True, null=True, default=None)),
],
options={"db_table": "forms", "managed": True,},
),
migrations.CreateModel(
name="FormXCard",
fields=[
("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("card", models.ForeignKey(to="models.CardModel", db_column="cardid", on_delete=models.CASCADE)),
("form", models.ForeignKey(to="models.Form", db_column="formid", on_delete=models.CASCADE)),
("sortorder", models.IntegerField(blank=True, null=True, default=None)),
],
options={"db_table": "forms_x_cards", "managed": True,},
),
migrations.CreateModel(
name="Function",
fields=[
("functionid", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)),
("functiontype", models.TextField(blank=True, null=True)),
("name", models.TextField(blank=True, null=True)),
("description", models.TextField(blank=True, null=True)),
("defaultconfig", JSONField(blank=True, null=True, db_column="defaultconfig")),
("modulename", models.TextField(blank=True, null=True)),
("classname", models.TextField(blank=True, null=True)),
("component", models.TextField(blank=True, null=True)),
],
options={"db_table": "functions", "managed": True,},
),
migrations.CreateModel(
name="FunctionXGraph",
fields=[
("id", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)),
("function", models.ForeignKey(to="models.Function", db_column="functionid", on_delete=models.CASCADE)),
("graph", models.ForeignKey(to="models.GraphModel", db_column="graphid", on_delete=models.CASCADE)),
("config", JSONField(blank=True, null=True, db_column="config")),
],
options={"db_table": "functions_x_graphs", "managed": True,},
),
migrations.CreateModel(
name="Icon",
fields=[
("id", models.AutoField(primary_key=True, serialize=True)),
("name", models.TextField(blank=True, null=True)),
("cssclass", models.TextField(blank=True, null=True)),
],
options={"db_table": "icons", "managed": True,},
),
migrations.CreateModel(
name="Node",
fields=[
("nodeid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField()),
("description", models.TextField(blank=True, null=True)),
("istopnode", models.BooleanField()),
("ontologyclass", models.TextField(blank=True, null=True)),
("datatype", models.TextField()),
(
"graph",
models.ForeignKey(blank=False, db_column="graphid", null=False, to="models.GraphModel", on_delete=models.CASCADE),
),
("config", JSONField(blank=True, db_column="config", null=True)),
],
options={"db_table": "nodes", "managed": True,},
),
migrations.CreateModel(
name="NodeGroup",
fields=[
("nodegroupid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("legacygroupid", models.TextField(blank=True, null=True)),
("cardinality", models.TextField(blank=True, default="1")),
(
"parentnodegroup",
models.ForeignKey(
blank=True, db_column="parentnodegroupid", null=True, to="models.NodeGroup", on_delete=models.CASCADE
),
),
],
options={
"db_table": "node_groups",
"managed": True,
"default_permissions": (),
"permissions": (
("read_nodegroup", "Read"),
("write_nodegroup", "Create/Update"),
("delete_nodegroup", "Delete"),
("no_access_to_nodegroup", "No Access"),
),
},
),
migrations.CreateModel(
name="Ontology",
fields=[
("ontologyid", models.UUIDField(default=uuid.uuid1, primary_key=True)),
("name", models.TextField()),
("version", models.TextField()),
("path", models.TextField()),
(
"parentontology",
models.ForeignKey(
to="models.Ontology",
db_column="parentontologyid",
related_name="extensions",
null=True,
blank=True,
on_delete=models.CASCADE,
),
),
],
options={"db_table": "ontologies", "managed": True,},
),
migrations.CreateModel(
name="OntologyClass",
fields=[
("ontologyclassid", models.UUIDField(default=uuid.uuid1, primary_key=True)),
("source", models.TextField()),
("target", JSONField(null=True)),
(
"ontology",
models.ForeignKey(
to="models.Ontology", db_column="ontologyid", related_name="ontologyclasses", on_delete=models.CASCADE
),
),
],
options={"db_table": "ontologyclasses", "managed": True,},
),
migrations.CreateModel(
name="Relation",
fields=[
("relationid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
(
"conceptfrom",
models.ForeignKey(
db_column="conceptidfrom", related_name="relation_concepts_from", to="models.Concept", on_delete=models.CASCADE
),
),
(
"conceptto",
models.ForeignKey(
db_column="conceptidto", related_name="relation_concepts_to", to="models.Concept", on_delete=models.CASCADE
),
),
("relationtype", models.ForeignKey(db_column="relationtype", to="models.DRelationType", on_delete=models.CASCADE)),
],
options={"db_table": "relations", "managed": True,},
),
migrations.CreateModel(
name="ReportTemplate",
fields=[
("templateid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField(null=True, blank=True)),
("description", models.TextField(null=True, blank=True)),
("component", models.TextField()),
("componentname", models.TextField()),
("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)),
],
options={"db_table": "report_templates", "managed": True,},
),
migrations.CreateModel(
name="Report",
fields=[
("reportid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField(null=True, blank=True)),
("template", models.ForeignKey(db_column="templateid", to="models.ReportTemplate", on_delete=models.CASCADE)),
("graph", models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE)),
("config", JSONField(blank=True, db_column="config", null=True)),
("formsconfig", JSONField(blank=True, db_column="formsconfig", null=True)),
("active", models.BooleanField(default=False)),
],
options={"db_table": "reports", "managed": True,},
),
migrations.CreateModel(
name="Resource2ResourceConstraint",
fields=[
("resource2resourceid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
(
"resourceclassfrom",
models.ForeignKey(
blank=True,
db_column="resourceclassfrom",
null=True,
related_name="resxres_contstraint_classes_from",
to="models.Node",
on_delete=models.SET_NULL,
),
),
(
"resourceclassto",
models.ForeignKey(
blank=True,
db_column="resourceclassto",
null=True,
related_name="resxres_contstraint_classes_to",
to="models.Node",
on_delete=models.SET_NULL,
),
),
],
options={"db_table": "resource_2_resource_constraints", "managed": True,},
),
migrations.CreateModel(
name="ResourceInstance",
fields=[
("resourceinstanceid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("legacyid", models.TextField(blank=True, unique=True, null=True)),
("graph", models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE)),
("createdtime", models.DateTimeField(auto_now_add=True)),
],
options={"db_table": "resource_instances", "managed": True,},
),
migrations.CreateModel(
name="ResourceXResource",
fields=[
("resourcexid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("notes", models.TextField(blank=True, null=True)),
("datestarted", models.DateField(blank=True, null=True)),
("dateended", models.DateField(blank=True, null=True)),
],
options={"db_table": "resource_x_resource", "managed": True,},
),
migrations.CreateModel(
name="TileModel",
fields=[
("tileid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("data", JSONField(blank=True, db_column="tiledata", null=True)),
("nodegroup", models.ForeignKey(db_column="nodegroupid", to="models.NodeGroup", on_delete=models.CASCADE)),
(
"parenttile",
models.ForeignKey(blank=True, db_column="parenttileid", null=True, to="models.TileModel", on_delete=models.CASCADE),
),
(
"resourceinstance",
models.ForeignKey(db_column="resourceinstanceid", to="models.ResourceInstance", on_delete=models.CASCADE),
),
("sortorder", models.IntegerField(blank=True, null=True, default=0)),
],
options={"db_table": "tiles", "managed": True,},
),
migrations.CreateModel(
name="Value",
fields=[
("valueid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("value", models.TextField()),
("concept", models.ForeignKey(db_column="conceptid", to="models.Concept", on_delete=models.CASCADE)),
(
"language",
models.ForeignKey(blank=True, db_column="languageid", null=True, to="models.DLanguage", on_delete=models.CASCADE),
),
("valuetype", models.ForeignKey(db_column="valuetype", to="models.DValueType", on_delete=models.CASCADE)),
],
options={"db_table": "values", "managed": True,},
),
migrations.CreateModel(
name="Widget",
fields=[
("widgetid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField()),
("component", models.TextField()),
("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)),
("helptext", models.TextField(blank=True, null=True)),
("datatype", models.TextField()),
],
options={"db_table": "widgets", "managed": True,},
),
migrations.CreateModel(
name="MapLayer",
fields=[
("maplayerid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField(unique=True)),
("layerdefinitions", JSONField(blank=True, db_column="layerdefinitions", null=True)),
("isoverlay", models.BooleanField(default=False)),
("icon", models.TextField(default=None)),
("activated", models.BooleanField(default=True)),
("addtomap", models.BooleanField(default=False)),
],
options={"db_table": "map_layers", "managed": True,},
),
migrations.CreateModel(
name="MapSource",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("name", models.TextField(unique=True)),
("source", JSONField(blank=True, db_column="source", null=True)),
],
options={"db_table": "map_sources", "managed": True,},
),
migrations.CreateModel(
name="TileserverLayer",
fields=[
("name", models.TextField(unique=True)),
("path", models.TextField()),
("config", JSONField(db_column="config")),
("map_layer", models.ForeignKey(db_column="map_layerid", to="models.MapLayer", on_delete=models.CASCADE)),
("map_source", models.ForeignKey(db_column="map_sourceid", to="models.MapSource", on_delete=models.CASCADE)),
],
options={"db_table": "tileserver_layers", "managed": True,},
),
migrations.CreateModel(
name="GraphXMapping",
fields=[
("id", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)),
("graph", models.ForeignKey(to="models.GraphModel", db_column="graphid", on_delete=models.CASCADE)),
("mapping", JSONField(blank=True, db_column="mapping")),
],
options={"db_table": "graphs_x_mapping_file", "managed": True,},
),
migrations.AddField(
model_name="ddatatype",
name="defaultwidget",
field=models.ForeignKey(db_column="defaultwidget", to="models.Widget", null=True, on_delete=models.SET_NULL),
),
migrations.AddField(
model_name="resourcexresource",
name="relationshiptype",
field=models.ForeignKey(db_column="relationshiptype", to="models.Value", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="resourcexresource",
name="resourceinstanceidfrom",
field=models.ForeignKey(
blank=True,
db_column="resourceinstanceidfrom",
null=True,
related_name="resxres_resource_instance_ids_from",
to="models.ResourceInstance",
on_delete=models.CASCADE,
),
),
migrations.AddField(
model_name="resourcexresource",
name="resourceinstanceidto",
field=models.ForeignKey(
blank=True,
db_column="resourceinstanceidto",
null=True,
related_name="resxres_resource_instance_ids_to",
to="models.ResourceInstance",
on_delete=models.CASCADE,
),
),
migrations.AddField(
model_name="node",
name="nodegroup",
field=models.ForeignKey(blank=True, db_column="nodegroupid", null=True, to="models.NodeGroup", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="edge",
name="domainnode",
field=models.ForeignKey(db_column="domainnodeid", related_name="edge_domains", to="models.Node", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="edge",
name="rangenode",
field=models.ForeignKey(db_column="rangenodeid", related_name="edge_ranges", to="models.Node", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="concept",
name="nodetype",
field=models.ForeignKey(db_column="nodetype", to="models.DNodeType", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="cardxnodexwidget",
name="node",
field=models.ForeignKey(db_column="nodeid", to="models.Node", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="cardxnodexwidget",
name="widget",
field=models.ForeignKey(db_column="widgetid", to="models.Widget", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="cardmodel",
name="nodegroup",
field=models.ForeignKey(db_column="nodegroupid", to="models.NodeGroup", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="cardmodel",
name="graph",
field=models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE),
),
migrations.AddField(
model_name="form",
name="graph",
field=models.ForeignKey(
to="models.GraphModel", db_column="graphid", related_name="forms", null=False, blank=False, on_delete=models.CASCADE
),
),
migrations.AddField(
model_name="graphmodel", name="functions", field=models.ManyToManyField(to="models.Function", through="FunctionXGraph"),
),
migrations.AddField(
model_name="graphmodel",
name="ontology",
field=models.ForeignKey(
to="models.Ontology", db_column="ontologyid", related_name="graphs", null=True, blank=True, on_delete=models.SET_NULL
),
),
migrations.AlterUniqueTogether(name="edge", unique_together={("rangenode", "domainnode")},),
migrations.AlterUniqueTogether(name="cardxnodexwidget", unique_together={("node", "card", "widget")},),
migrations.AlterUniqueTogether(name="ontologyclass", unique_together={("source", "ontology")},),
migrations.AlterUniqueTogether(name="relation", unique_together={("conceptfrom", "conceptto", "relationtype")},),
migrations.AlterUniqueTogether(name="functionxgraph", unique_together={("function", "graph")},),
CreateAutoPopulateUUIDField("graphs", ["graphid"]),
CreateAutoPopulateUUIDField("cards", ["cardid"]),
CreateAutoPopulateUUIDField("concepts", ["conceptid"]),
CreateAutoPopulateUUIDField("edges", ["edgeid"]),
CreateAutoPopulateUUIDField("edit_log", ["editlogid"]),
CreateAutoPopulateUUIDField("forms", ["formid"]),
CreateAutoPopulateUUIDField("node_groups", ["nodegroupid"]),
CreateAutoPopulateUUIDField("nodes", ["nodeid"]),
CreateAutoPopulateUUIDField("relations", ["relationid"]),
CreateAutoPopulateUUIDField("resource_2_resource_constraints", ["resource2resourceid"]),
CreateAutoPopulateUUIDField("resource_instances", ["resourceinstanceid"]),
CreateAutoPopulateUUIDField("tiles", ["tileid"]),
CreateAutoPopulateUUIDField("values", ["valueid"]),
CreateAutoPopulateUUIDField("widgets", ["widgetid"]),
migrations.RunSQL(
"""
ALTER TABLE nodes ADD CONSTRAINT nodes_ddatatypes_fk FOREIGN KEY (datatype)
REFERENCES public.d_data_types (datatype) MATCH SIMPLE
"""
),
migrations.RunSQL(get_sql_string_from_file(os.path.join(settings.ROOT_DIR, "db", "dml", "db_data.sql")), ""),
migrations.RunPython(forwards_func, reverse_func),
migrations.RunPython(make_permissions, reverse_code=lambda *args, **kwargs: True),
]
| agpl-3.0 | 134,465,421,707,686,270 | 47.649733 | 182 | 0.553394 | false |
stuart-c/pgu | data/themes/default/generate.py | 28 | 3502 | import pygame
from pygame.locals import *
pygame.display.init()
pygame.display.set_mode((80,80),32)
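# prep() expands a 2x2 source image into a 3x3 "nine-patch" grid: the four
# quadrants become the corners, then the innermost edge row/column of pixels
# is smeared across the middle band so the theme engine can stretch borders.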
def prep(name):
fname = name+".png"
img = pygame.image.load(fname)
    w,h = img.get_width()//2,img.get_height()//2
out = pygame.Surface((w*3,h*3),SWSURFACE|SRCALPHA,32)
out.fill((0,0,0,0))
out.blit(img.subsurface(0,0,w,h),(0,0))
out.blit(img.subsurface(w,0,w,h),(w*2,0))
out.blit(img.subsurface(0,h,w,h),(0,h*2))
out.blit(img.subsurface(w,h,w,h),(w*2,h*2))
for i in range(0,w):
img = out.subsurface((w-1,0,1,h*3)).convert_alpha()
out.blit(img,(w+i,0))
for i in range(0,h):
img = out.subsurface((0,h-1,w*3,1)).convert_alpha()
out.blit(img,(0,h+i))
return out,w,h
todo = [
('button.normal','dot.normal',None,3,3,'789456123'),
('button.hover','dot.hover',None,3,3,'789456123'),
('button.down','dot.down',None,3,3,'789456123'),
('checkbox.off.normal','box.normal',None,2,2,'7913'),
('checkbox.on.normal','box.down','check',2,2,'7913'),
('checkbox.off.hover','box.hover',None,2,2,'7913'),
('checkbox.on.hover','box.hover','check',2,2,'7913'),
('radio.off.normal','dot.normal',None,2,2,'7913'),
('radio.on.normal','dot.down','radio',2,2,'7913'),
('radio.off.hover','dot.hover',None,2,2,'7913'),
('radio.on.hover','dot.hover','radio',2,2,'7913'),
('tool.normal','box.normal',None,3,3,'789456123'),
('tool.hover','box.hover',None,3,3,'789456123'),
('tool.down','box.down',None,3,3,'789456123'),
('hslider','idot.normal',None,3,3,'789456123'),
('hslider.bar.normal','dot.normal',None,3,3,'789456123'),
('hslider.bar.hover','dot.hover',None,3,3,'789456123'),
('hslider.left','sbox.normal','left',2,2,'7913'),
('hslider.right','sbox.normal','right',2,2,'7913'),
('vslider','idot.normal',None,3,3,'789456123'),
('vslider.bar.normal','vdot.normal',None,3,3,'789456123'),
('vslider.bar.hover','vdot.hover',None,3,3,'789456123'),
('vslider.up','vsbox.normal','up',2,2,'7913'),
('vslider.down','vsbox.normal','down',2,2,'7913'),
('dialog.close.normal','rdot.hover',None,2,2,'7913'),
('dialog.close.hover','rdot.hover','x',2,2,'7913'),
('dialog.close.down','rdot.down','x',2,2,'7913'),
('menu.normal','desktop',None,1,1,'7'),
('menu.hover','box.normal',None,3,3,'789456123'),
('menu.down','box.down',None,3,3,'789456123'),
('select.selected.normal','box.normal',None,3,3,'788455122'),
('select.selected.hover','box.hover',None,3,3,'788455122'),
('select.selected.down','box.down',None,3,3,'788455122'),
('select.arrow.normal','box.hover',None,3,3,'889556223'),
('select.arrow.hover','box.hover',None,3,3,'889556223'),
('select.arrow.down','box.down',None,3,3,'889556223'),
('progressbar','sbox.normal',None,3,3,'789456123'),
('progressbar.bar','box.hover',None,3,3,'789456123'),
]
for fname,img,over,ww,hh,s in todo:
print(fname)
img,w,h = prep(img)
out = pygame.Surface((ww*w,hh*h),SWSURFACE|SRCALPHA,32)
out.fill((0,0,0,0))
n = 0
for y in range(0,hh):
for x in range(0,ww):
c = int(s[n])
            xx,yy = (c-1)%3,2-(c-1)//3
out.blit(img.subsurface((xx*w,yy*h,w,h)),(x*w,y*h))
n += 1
if over != None:
over = pygame.image.load(over+".png")
out.blit(over,(0,0))
pygame.image.save(out,fname+".tga")
| lgpl-2.1 | -5,766,512,715,269,177,000 | 34.734694 | 65 | 0.573101 | false |
eparis/contrib | hack/verify-flags-underscore.py | 34 | 8924 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import mmap
import os
import re
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("filenames", help="list of files to check, all files if unspecified", nargs='*')
parser.add_argument("-e", "--skip-exceptions", help="ignore hack/verify-flags/exceptions.txt and print all output", action="store_true")
args = parser.parse_args()
# Cargo culted from http://stackoverflow.com/questions/898669/how-can-i-detect-if-a-file-is-binary-non-text-in-python
def is_binary(pathname):
"""Return true if the given filename is binary.
@raise EnvironmentError: if the file does not exist or cannot be accessed.
@attention: found @ http://bytes.com/topic/python/answers/21222-determine-file-type-binary-text on 6/08/2010
@author: Trent Mick <[email protected]>
@author: Jorge Orpinel <[email protected]>"""
try:
with open(pathname, 'r') as f:
CHUNKSIZE = 1024
while 1:
chunk = f.read(CHUNKSIZE)
if '\0' in chunk: # found null byte
return True
if len(chunk) < CHUNKSIZE:
break # done
except:
return True
return False
def get_all_files(rootdir):
all_files = []
for root, dirs, files in os.walk(rootdir):
# don't visit certain dirs
if 'Godeps' in dirs:
dirs.remove('Godeps')
if 'third_party' in dirs:
dirs.remove('third_party')
if '.git' in dirs:
dirs.remove('.git')
if 'exceptions.txt' in files:
files.remove('exceptions.txt')
if 'known-flags.txt' in files:
files.remove('known-flags.txt')
if 'vendor' in dirs:
dirs.remove('vendor')
for name in files:
if name.endswith(".svg"):
continue
if name.endswith(".gliffy"):
continue
pathname = os.path.join(root, name)
if is_binary(pathname):
continue
all_files.append(pathname)
return all_files
def normalize_files(rootdir, files):
newfiles = []
a = ['Godeps', 'vendor', 'third_party', 'exceptions.txt', 'known-flags.txt']
for f in files:
if any(x in f for x in a):
continue
if f.endswith(".svg"):
continue
if f.endswith(".gliffy"):
continue
newfiles.append(f)
for i, f in enumerate(newfiles):
if not os.path.isabs(f):
newfiles[i] = os.path.join(rootdir, f)
return newfiles
def line_has_bad_flag(line, flagre):
results = flagre.findall(line)
for result in results:
if not "_" in result:
return False
# this should exclude many cases where jinja2 templates use kube flags
# as variables, except it uses _ for the variable name
if "{% set" + result + "= \"" in line:
return False
if "pillar[" + result + "]" in line:
return False
if "grains" + result in line:
return False
# These are usually yaml definitions
if result.endswith(":"):
return False
# something common in juju variables...
if "template_data[" + result + "]" in line:
return False
return True
return False
# The list of files might not be the whole repo. If someone only changed a
# couple of files we don't want to run all of the golang files looking for
# flags. Instead load the list of flags from hack/verify-flags/known-flags.txt
# If running the golang files finds a new flag not in that file, return an
# error and tell the user to add the flag to the flag list.
def get_flags(rootdir, files):
# preload the 'known' flags
pathname = os.path.join(rootdir, "hack/verify-flags/known-flags.txt")
f = open(pathname, 'r')
flags = set(f.read().splitlines())
f.close()
# preload the 'known' flags which don't follow the - standard
pathname = os.path.join(rootdir, "hack/verify-flags/excluded-flags.txt")
f = open(pathname, 'r')
excluded_flags = set(f.read().splitlines())
f.close()
regexs = [ re.compile('Var[P]?\([^,]*, "([^"]*)"'),
re.compile('.String[P]?\("([^"]*)",[^,]+,[^)]+\)'),
re.compile('.Int[P]?\("([^"]*)",[^,]+,[^)]+\)'),
re.compile('.Bool[P]?\("([^"]*)",[^,]+,[^)]+\)'),
re.compile('.Duration[P]?\("([^"]*)",[^,]+,[^)]+\)'),
re.compile('.StringSlice[P]?\("([^"]*)",[^,]+,[^)]+\)') ]
new_flags = set()
new_excluded_flags = set()
# walk all the files looking for any flags being declared
for pathname in files:
if not pathname.endswith(".go"):
continue
f = open(pathname, 'r')
data = f.read()
f.close()
matches = []
for regex in regexs:
matches = matches + regex.findall(data)
for flag in matches:
if any(x in flag for x in excluded_flags):
continue
if "_" in flag:
new_excluded_flags.add(flag)
if not "-" in flag:
continue
if flag not in flags:
new_flags.add(flag)
if len(new_excluded_flags) != 0:
print("Found a flag declared with an _ but which is not explicitly listed as a valid flag name in hack/verify-flags/excluded-flags.txt")
print("Are you certain this flag should not have been declared with an - instead?")
l = list(new_excluded_flags)
l.sort()
print("%s" % "\n".join(l))
sys.exit(1)
if len(new_flags) != 0:
print("Found flags in golang files not in the list of known flags. Please add these to hack/verify-flags/known-flags.txt")
l = list(new_flags)
l.sort()
print("%s" % "\n".join(l))
sys.exit(1)
return list(flags)
def flags_to_re(flags):
"""turn the list of all flags we found into a regex find both - and _ versions"""
dashRE = re.compile('[-_]')
flagREs = []
for flag in flags:
# turn all flag names into regexs which will find both types
newre = dashRE.sub('[-_]', flag)
# only match if there is not a leading or trailing alphanumeric character
flagREs.append("[^\w${]" + newre + "[^\w]")
# turn that list of regex strings into a single large RE
flagRE = "|".join(flagREs)
flagRE = re.compile(flagRE)
return flagRE
def load_exceptions(rootdir):
exceptions = set()
if args.skip_exceptions:
return exceptions
exception_filename = os.path.join(rootdir, "hack/verify-flags/exceptions.txt")
exception_file = open(exception_filename, 'r')
for exception in exception_file.read().splitlines():
out = exception.split(":", 1)
if len(out) != 2:
printf("Invalid line in exceptions file: %s" % exception)
continue
filename = out[0]
line = out[1]
exceptions.add((filename, line))
return exceptions
def main():
rootdir = os.path.dirname(__file__) + "/../"
rootdir = os.path.abspath(rootdir)
exceptions = load_exceptions(rootdir)
if len(args.filenames) > 0:
files = args.filenames
else:
files = get_all_files(rootdir)
files = normalize_files(rootdir, files)
flags = get_flags(rootdir, files)
flagRE = flags_to_re(flags)
bad_lines = []
# walk all the file looking for any flag that was declared and now has an _
for pathname in files:
relname = os.path.relpath(pathname, rootdir)
f = open(pathname, 'r')
for line in f.read().splitlines():
if line_has_bad_flag(line, flagRE):
if (relname, line) not in exceptions:
bad_lines.append((relname, line))
f.close()
if len(bad_lines) != 0:
if not args.skip_exceptions:
print("Found illegal 'flag' usage. If these are false positives you should run `hack/verify-flags-underscore.py -e > hack/verify-flags/exceptions.txt` to update the list.")
bad_lines.sort()
for (relname, line) in bad_lines:
print("%s:%s" % (relname, line))
return 1
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | 294,579,953,526,699,600 | 35.57377 | 184 | 0.589758 | false |
noelbk/neutron-juniper | neutron/agent/securitygroups_rpc.py | 8 | 8254 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo.config import cfg
from neutron.common import topics
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
SG_RPC_VERSION = "1.1"
security_group_opts = [
cfg.StrOpt(
'firewall_driver',
default='neutron.agent.firewall.NoopFirewallDriver',
help=_('Driver for Security Groups Firewall'))
]
cfg.CONF.register_opts(security_group_opts, 'SECURITYGROUP')
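# Illustrative agent configuration (not part of this module) swapping in a
# real driver in place of the noop default; the exact class path depends on
# the agent in use:
#
#   [SECURITYGROUP]
#   firewall_driver = neutron.agent.linux.iptables_firewall.IptablesFirewallDriver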
def is_firewall_enabled():
return (cfg.CONF.SECURITYGROUP.firewall_driver !=
'neutron.agent.firewall.NoopFirewallDriver')
def disable_security_group_extension_if_noop_driver(
supported_extension_aliases):
if not is_firewall_enabled():
LOG.debug(_('Disabled security-group extension.'))
supported_extension_aliases.remove('security-group')
class SecurityGroupServerRpcApiMixin(object):
"""A mix-in that enable SecurityGroup support in plugin rpc."""
def security_group_rules_for_devices(self, context, devices):
LOG.debug(_("Get security group rules "
"for devices via rpc %r"), devices)
return self.call(context,
self.make_msg('security_group_rules_for_devices',
devices=devices),
version=SG_RPC_VERSION,
topic=self.topic)
class SecurityGroupAgentRpcCallbackMixin(object):
"""A mix-in that enable SecurityGroup agent
support in agent implementations.
"""
    # The mix-in consumer is expected to set sg_agent during agent init.
sg_agent = None
def _security_groups_agent_not_set(self):
LOG.warning(_("Security group agent binding currently not set. "
"This should be set by the end of the init "
"process."))
def security_groups_rule_updated(self, context, **kwargs):
"""Callback for security group rule update.
:param security_groups: list of updated security_groups
"""
security_groups = kwargs.get('security_groups', [])
LOG.debug(
_("Security group rule updated on remote: %s"), security_groups)
if not self.sg_agent:
return self._security_groups_agent_not_set()
self.sg_agent.security_groups_rule_updated(security_groups)
def security_groups_member_updated(self, context, **kwargs):
"""Callback for security group member update.
:param security_groups: list of updated security_groups
"""
security_groups = kwargs.get('security_groups', [])
LOG.debug(
_("Security group member updated on remote: %s"), security_groups)
if not self.sg_agent:
return self._security_groups_agent_not_set()
self.sg_agent.security_groups_member_updated(security_groups)
def security_groups_provider_updated(self, context, **kwargs):
"""Callback for security group provider update."""
LOG.debug(_("Provider rule updated"))
if not self.sg_agent:
return self._security_groups_agent_not_set()
self.sg_agent.security_groups_provider_updated()
class SecurityGroupAgentRpcMixin(object):
"""A mix-in that enable SecurityGroup agent
support in agent implementations.
"""
def init_firewall(self):
firewall_driver = cfg.CONF.SECURITYGROUP.firewall_driver
LOG.debug(_("Init firewall settings (driver=%s)"), firewall_driver)
self.firewall = importutils.import_object(firewall_driver)
def prepare_devices_filter(self, device_ids):
if not device_ids:
return
LOG.info(_("Preparing filters for devices %s"), device_ids)
devices = self.plugin_rpc.security_group_rules_for_devices(
self.context, list(device_ids))
with self.firewall.defer_apply():
for device in devices.values():
self.firewall.prepare_port_filter(device)
def security_groups_rule_updated(self, security_groups):
LOG.info(_("Security group "
"rule updated %r"), security_groups)
self._security_group_updated(
security_groups,
'security_groups')
def security_groups_member_updated(self, security_groups):
LOG.info(_("Security group "
"member updated %r"), security_groups)
self._security_group_updated(
security_groups,
'security_group_source_groups')
def _security_group_updated(self, security_groups, attribute):
devices = []
sec_grp_set = set(security_groups)
for device in self.firewall.ports.values():
if sec_grp_set & set(device.get(attribute, [])):
devices.append(device)
if devices:
self.refresh_firewall(devices)
def security_groups_provider_updated(self):
LOG.info(_("Provider rule updated"))
self.refresh_firewall()
def remove_devices_filter(self, device_ids):
if not device_ids:
return
LOG.info(_("Remove device filter for %r"), device_ids)
with self.firewall.defer_apply():
for device_id in device_ids:
device = self.firewall.ports.get(device_id)
if not device:
continue
self.firewall.remove_port_filter(device)
def refresh_firewall(self, devices=None):
LOG.info(_("Refresh firewall rules"))
if devices:
device_ids = [d['device'] for d in devices]
else:
device_ids = self.firewall.ports.keys()
if not device_ids:
LOG.info(_("No ports here to refresh firewall"))
return
devices = self.plugin_rpc.security_group_rules_for_devices(
self.context, device_ids)
with self.firewall.defer_apply():
for device in devices.values():
LOG.debug(_("Update port filter for %s"), device['device'])
self.firewall.update_port_filter(device)
class SecurityGroupAgentRpcApiMixin(object):
def _get_security_group_topic(self):
return topics.get_topic_name(self.topic,
topics.SECURITY_GROUP,
topics.UPDATE)
def security_groups_rule_updated(self, context, security_groups):
"""Notify rule updated security groups."""
if not security_groups:
return
self.fanout_cast(context,
self.make_msg('security_groups_rule_updated',
security_groups=security_groups),
version=SG_RPC_VERSION,
topic=self._get_security_group_topic())
def security_groups_member_updated(self, context, security_groups):
"""Notify member updated security groups."""
if not security_groups:
return
self.fanout_cast(context,
self.make_msg('security_groups_member_updated',
security_groups=security_groups),
version=SG_RPC_VERSION,
topic=self._get_security_group_topic())
def security_groups_provider_updated(self, context):
"""Notify provider updated security groups."""
self.fanout_cast(context,
self.make_msg('security_groups_provider_updated'),
version=SG_RPC_VERSION,
topic=self._get_security_group_topic())
| apache-2.0 | -6,769,385,185,486,068,000 | 37.570093 | 78 | 0.612673 | false |
linked67/p2pool-exclusivecoin | p2pool/bitcoin/height_tracker.py | 227 | 4678 | from twisted.internet import defer
from twisted.python import log
import p2pool
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral, forest, jsonrpc, variable
class HeaderWrapper(object):
__slots__ = 'hash previous_hash'.split(' ')
@classmethod
def from_header(cls, header):
return cls(bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)), header['previous_block'])
def __init__(self, hash, previous_hash):
self.hash, self.previous_hash = hash, previous_hash
class HeightTracker(object):
'''Point this at a factory and let it take care of getting block heights'''
def __init__(self, best_block_func, factory, backlog_needed):
self._best_block_func = best_block_func
self._factory = factory
self._backlog_needed = backlog_needed
self._tracker = forest.Tracker()
self._watch1 = self._factory.new_headers.watch(self._heard_headers)
self._watch2 = self._factory.new_block.watch(self._request)
self._requested = set()
self._clear_task = deferral.RobustLoopingCall(self._requested.clear)
self._clear_task.start(60)
self._last_notified_size = 0
self.updated = variable.Event()
self._think_task = deferral.RobustLoopingCall(self._think)
self._think_task.start(15)
self._think2_task = deferral.RobustLoopingCall(self._think2)
self._think2_task.start(15)
def _think(self):
try:
highest_head = max(self._tracker.heads, key=lambda h: self._tracker.get_height_and_last(h)[0]) if self._tracker.heads else None
if highest_head is None:
return # wait for think2
height, last = self._tracker.get_height_and_last(highest_head)
if height < self._backlog_needed:
self._request(last)
except:
log.err(None, 'Error in HeightTracker._think:')
def _think2(self):
self._request(self._best_block_func())
def _heard_headers(self, headers):
changed = False
for header in headers:
hw = HeaderWrapper.from_header(header)
if hw.hash in self._tracker.items:
continue
changed = True
self._tracker.add(hw)
if changed:
self.updated.happened()
self._think()
if len(self._tracker.items) >= self._last_notified_size + 100:
print 'Have %i/%i block headers' % (len(self._tracker.items), self._backlog_needed)
self._last_notified_size = len(self._tracker.items)
@defer.inlineCallbacks
def _request(self, last):
if last in self._tracker.items:
return
if last in self._requested:
return
self._requested.add(last)
(yield self._factory.getProtocol()).send_getheaders(version=1, have=[], last=last)
def get_height_rel_highest(self, block_hash):
# callers: highest height can change during yields!
best_height, best_last = self._tracker.get_height_and_last(self._best_block_func())
height, last = self._tracker.get_height_and_last(block_hash)
if last != best_last:
return -1000000000 # XXX hack
return height - best_height
@defer.inlineCallbacks
def get_height_rel_highest_func(bitcoind, factory, best_block_func, net):
if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
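        # bitcoind advertises a getblock RPC: resolve block heights through
        # cached RPC lookups below instead of tracking headers ourselves.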
@deferral.DeferredCacher
@defer.inlineCallbacks
def height_cacher(block_hash):
try:
x = yield bitcoind.rpc_getblock('%x' % (block_hash,))
except jsonrpc.Error_for_code(-5): # Block not found
if not p2pool.DEBUG:
raise deferral.RetrySilentlyException()
else:
raise
defer.returnValue(x['blockcount'] if 'blockcount' in x else x['height'])
best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(best_block_func())))
def get_height_rel_highest(block_hash):
this_height = height_cacher.call_now(block_hash, 0)
best_height = height_cacher.call_now(best_block_func(), 0)
best_height_cached.set(max(best_height_cached.value, this_height, best_height))
return this_height - best_height_cached.value
else:
get_height_rel_highest = HeightTracker(best_block_func, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
defer.returnValue(get_height_rel_highest)
| gpl-3.0 | 6,235,291,453,202,136,000 | 40.39823 | 156 | 0.614793 | false |
benschulz/servo | tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_stash.py | 299 | 1231 | import os
import unittest
import urllib2
import json
import uuid
import wptserve
from wptserve.router import any_method
from base import TestUsingServer, doc_root
class TestStash(TestUsingServer):
def test_put_take(self):
@wptserve.handlers.handler
def handler(request, response):
if request.method == "POST":
request.server.stash.put(request.POST.first("id"), request.POST.first("data"))
data = "OK"
elif request.method == "GET":
data = request.server.stash.take(request.GET.first("id"))
if data is None:
return "NOT FOUND"
return data
id = str(uuid.uuid4())
route = (any_method, "/test/put_take", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body={"id": id, "data": "Sample data"})
self.assertEquals(resp.read(), "OK")
resp = self.request(route[1], query="id=" + id)
self.assertEquals(resp.read(), "Sample data")
resp = self.request(route[1], query="id=" + id)
self.assertEquals(resp.read(), "NOT FOUND")
if __name__ == '__main__':
unittest.main()
| mpl-2.0 | 3,471,317,136,863,814,700 | 30.564103 | 94 | 0.593014 | false |
Juniper/neutron | neutron/common/test_lib.py | 13 | 2136 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Colorizer Code is borrowed from Twisted:
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# describes parameters used by different unit/functional tests
# a plugin-specific testing mechanism should import this dictionary
# and override the values in it if needed (e.g., run_tests.py in
# neutron/plugins/openvswitch/ )
test_config = {}
| apache-2.0 | 2,423,776,612,022,261,000 | 47.545455 | 77 | 0.746255 | false |
cmbiwer/pycbc | pycbc/distributions/power_law.py | 6 | 9306 | # Copyright (C) 2016 Christopher M. Biwer
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This modules provides classes for evaluating distributions where the
probability density function is a power law.
"""
import numpy
from pycbc.distributions import bounded
class UniformPowerLaw(bounded.BoundedDist):
r"""
For a uniform distribution in power law. The parameters are
independent of each other. Instances of this class can be called like
a function. By default, logpdf will be called, but this can be changed
by setting the class's __call__ method to its pdf method.
The cumulative distribution function (CDF) will be the ratio of volumes:
.. math::
F(r) = \frac{V(r)}{V(R)}
Where :math:`R` is the radius of the sphere. So we can write our
probability density function (PDF) as:
.. math::
f(r) = c r^n
    For generality we use :math:`n` for the power of the volume element,
    e.g. :math:`n=2` for a 3-dimensional sphere, and :math:`c` as a general
    constant.
So now we calculate the CDF in general for this type of PDF:
.. math::
F(r) = \int f(r) dr = \int c r^n dr = \frac{1}{n + 1} c r^{n + 1} + k
    Requiring the CDF to equal 0 at radius :math:`r_{l}` and 1 at radius
    :math:`r_{h}` gives a system of equations for the two constants;
    subtracting the two conditions eliminates :math:`k` and leaves:
    .. math::
        1 = \frac{1}{n + 1} c ((r_{h})^{n + 1} - (r_{l})^{n + 1})
    So :math:`c = (n + 1) / ((r_{h})^{n + 1} - (r_{l})^{n + 1})`, and
    :math:`k` is:
.. math::
k = - \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}
    Substituting :math:`c` and :math:`k` back in, the CDF is:
.. math::
F(r) = \frac{1}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} r^{n + 1} - \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}
And the PDF is the derivative of the CDF:
.. math::
f(r) = \frac{(n + 1)}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} (r)^n
    Now we use the probability integral transform to sample the continuous
    random variable from uniform draws :math:`u`. To do this we set the CDF
    equal to :math:`u`:
.. math::
F(r) = u = \frac{1}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} r^{n + 1} - \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}
    Solving for :math:`r = F^{-1}(u)` gives:
.. math::
r = ( ((r_{h})^{n + 1} - (r_{l})^{n + 1}) u + (r_{l})^{n + 1})^{\frac{1}{n + 1}}
    Therefore the radius can be sampled by rescaling a uniform draw by the
    bracketed volume factor, adding the lower-bound term, and taking the
    :math:`(n+1)`-th root.
\**params :
The keyword arguments should provide the names of parameters and their
corresponding bounds, as either tuples or a `boundaries.Bounds`
instance.
Attributes
----------
    name : 'uniform_power_law'
The name of this distribution.
dim : int
The dimension of volume space. In the notation above `dim`
is :math:`n+1`. For a 3-dimensional sphere this is 3.
params : list of strings
The list of parameter names.
bounds : dict
A dictionary of the parameter names and their bounds.
norm : float
The normalization of the multi-dimensional pdf.
lognorm : float
The log of the normalization.
"""
name = "uniform_power_law"
def __init__(self, dim=None, **params):
super(UniformPowerLaw, self).__init__(**params)
self.dim = dim
self._norm = 1.0
self._lognorm = 0.0
for p in self._params:
self._norm *= self.dim / \
(self._bounds[p][1]**(self.dim) -
self._bounds[p][0]**(self.dim))
self._lognorm = numpy.log(self._norm)
@property
def norm(self):
return self._norm
@property
def lognorm(self):
return self._lognorm
def rvs(self, size=1, param=None):
"""Gives a set of random values drawn from this distribution.
Parameters
----------
size : {1, int}
The number of values to generate; default is 1.
param : {None, string}
If provided, will just return values for the given parameter.
Otherwise, returns random values for each parameter.
Returns
-------
structured array
The random values in a numpy structured array. If a param was
specified, the array will only have an element corresponding to the
given parameter. Otherwise, the array will have an element for each
parameter in self's params.
"""
if param is not None:
dtype = [(param, float)]
else:
dtype = [(p, float) for p in self.params]
arr = numpy.zeros(size, dtype=dtype)
for (p,_) in dtype:
offset = numpy.power(self._bounds[p][0], self.dim)
factor = numpy.power(self._bounds[p][1], self.dim) - \
numpy.power(self._bounds[p][0], self.dim)
arr[p] = numpy.random.uniform(0.0, 1.0, size=size)
arr[p] = numpy.power(factor * arr[p] + offset, 1.0 / self.dim)
return arr
def cdfinv(self, param, value):
"""Return inverse of cdf to map unit interval to parameter bounds.
"""
n = self.dim - 1
r_l = self._bounds[param][0]
r_h = self._bounds[param][1]
new_value = ((r_h**(n+1) - r_l**(n+1))*value + r_l**(n+1))**(1./(n+1))
return new_value
def _pdf(self, **kwargs):
"""Returns the pdf at the given values. The keyword arguments must
contain all of parameters in self's params. Unrecognized arguments are
ignored.
"""
for p in self._params:
if p not in kwargs.keys():
raise ValueError(
'Missing parameter {} to construct pdf.'.format(p))
if kwargs in self:
pdf = self._norm * \
numpy.prod([(kwargs[p])**(self.dim - 1)
for p in self._params])
return float(pdf)
else:
return 0.0
def _logpdf(self, **kwargs):
"""Returns the log of the pdf at the given values. The keyword
arguments must contain all of parameters in self's params. Unrecognized
arguments are ignored.
"""
for p in self._params:
if p not in kwargs.keys():
raise ValueError(
'Missing parameter {} to construct pdf.'.format(p))
if kwargs in self:
log_pdf = self._lognorm + \
(self.dim - 1) * \
numpy.log([kwargs[p] for p in self._params]).sum()
return log_pdf
else:
return -numpy.inf
@classmethod
def from_config(cls, cp, section, variable_args):
"""Returns a distribution based on a configuration file. The parameters
for the distribution are retrieved from the section titled
"[`section`-`variable_args`]" in the config file.
Parameters
----------
cp : pycbc.workflow.WorkflowConfigParser
A parsed configuration file that contains the distribution
options.
section : str
Name of the section in the configuration file.
variable_args : str
The names of the parameters for this distribution, separated by
`prior.VARARGS_DELIM`. These must appear in the "tag" part
of the section header.
Returns
-------
Uniform
A distribution instance from the pycbc.inference.prior module.
"""
return super(UniformPowerLaw, cls).from_config(cp, section,
variable_args,
bounds_required=True)
class UniformRadius(UniformPowerLaw):
""" For a uniform distribution in volume using spherical coordinates, this
is the distriubtion to use for the radius.
For more details see UniformPowerLaw.
"""
name = "uniform_radius"
def __init__(self, dim=3, **params):
        super(UniformRadius, self).__init__(dim=dim, **params)
__all__ = ["UniformPowerLaw", "UniformRadius"]
| gpl-3.0 | 169,296,715,332,956,930 | 34.792308 | 130 | 0.562433 | false |
iabdalkader/openmv | scripts/examples/16-Codes/find_barcodes.py | 3 | 2316 | # Barcode Example
#
# This example shows off how easy it is to detect bar codes using the
# OpenMV Cam M7. Barcode detection does not work on the M4 Camera.
import sensor, image, time, math
sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.VGA) # High Res!
sensor.set_windowing((640, 80)) # V Res of 80 == less work (40 for 2X the speed).
sensor.skip_frames(time = 2000)
sensor.set_auto_gain(False) # must turn this off to prevent image washout...
sensor.set_auto_whitebal(False) # must turn this off to prevent image washout...
clock = time.clock()
# Barcode detection can run at the full 640x480 resolution of your OpenMV Cam's
# OV7725 camera module. Barcode detection will also work in RGB565 mode but at
# a lower resolution. That said, barcode detection requires a higher resolution
# to work well so it should always be run at 640x480 in grayscale...
def barcode_name(code):
if(code.type() == image.EAN2):
return "EAN2"
if(code.type() == image.EAN5):
return "EAN5"
if(code.type() == image.EAN8):
return "EAN8"
if(code.type() == image.UPCE):
return "UPCE"
if(code.type() == image.ISBN10):
return "ISBN10"
if(code.type() == image.UPCA):
return "UPCA"
if(code.type() == image.EAN13):
return "EAN13"
if(code.type() == image.ISBN13):
return "ISBN13"
if(code.type() == image.I25):
return "I25"
if(code.type() == image.DATABAR):
return "DATABAR"
if(code.type() == image.DATABAR_EXP):
return "DATABAR_EXP"
if(code.type() == image.CODABAR):
return "CODABAR"
if(code.type() == image.CODE39):
return "CODE39"
if(code.type() == image.PDF417):
return "PDF417"
if(code.type() == image.CODE93):
return "CODE93"
if(code.type() == image.CODE128):
return "CODE128"
while(True):
clock.tick()
img = sensor.snapshot()
codes = img.find_barcodes()
for code in codes:
img.draw_rectangle(code.rect())
print_args = (barcode_name(code), code.payload(), (180 * code.rotation()) / math.pi, code.quality(), clock.fps())
print("Barcode %s, Payload \"%s\", rotation %f (degrees), quality %d, FPS %f" % print_args)
if not codes:
print("FPS %f" % clock.fps())
| mit | -7,142,224,181,834,798,000 | 34.630769 | 121 | 0.631261 | false |
ramsateesh/designate | designate/objects/blacklist.py | 6 | 1438 | # Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
class Blacklist(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
FIELDS = {
'pattern': {
'schema': {
'type': 'string',
'description': 'Regex for blacklisted zone name',
'format': 'regex',
'maxLength': 255,
},
'required': True
},
'description': {
'schema': {
'type': ['string', 'null'],
'description': 'Description for the blacklisted zone',
'maxLength': 160
}
}
}
STRING_KEYS = [
'id', 'pattern'
]
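    # Illustrative example (not in the original file): an entry blacklisting
    # every zone under example.com:
    #
    #   Blacklist(pattern=r'^([A-Za-z0-9-]+\.)*example\.com\.$',
    #             description='Reserved domain')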
class BlacklistList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = Blacklist
| apache-2.0 | 3,806,849,839,191,204,400 | 30.955556 | 78 | 0.59388 | false |
Distrotech/qemu | scripts/tracetool/transform.py | 78 | 4238 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Type-transformation rules.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012-2014, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
def _transform_type(type_, trans):
if isinstance(trans, str):
return trans
elif isinstance(trans, dict):
if type_ in trans:
return _transform_type(type_, trans[type_])
elif None in trans:
return _transform_type(type_, trans[None])
else:
return type_
elif callable(trans):
return trans(type_)
else:
raise ValueError("Invalid type transformation rule: %s" % trans)
def transform_type(type_, *trans):
"""Return a new type transformed according to the given rules.
Applies each of the transformation rules in trans in order.
If an element of trans is a string, return it.
If an element of trans is a function, call it with type_ as its only
argument.
If an element of trans is a dict, search type_ in its keys. If type_ is
a key, use the value as a transformation rule for type_. Otherwise, if
None is a key use the value as a transformation rule for type_.
Otherwise, return type_.
Parameters
----------
type_ : str
Type to transform.
trans : list of function or dict
Type transformation rules.
"""
if len(trans) == 0:
raise ValueError
res = type_
for t in trans:
res = _transform_type(res, t)
return res
##################################################
# tcg -> host
def _tcg_2_host(type_):
if type_ == "TCGv":
# force a fixed-size type (target-independent)
return "uint64_t"
else:
return type_
TCG_2_HOST = {
"TCGv_i32": "uint32_t",
"TCGv_i64": "uint64_t",
"TCGv_ptr": "void *",
None: _tcg_2_host,
}
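# Illustrative examples (not in the original file):
#   transform_type("TCGv_i32", TCG_2_HOST)  -> "uint32_t"  (direct dict hit)
#   transform_type("TCGv", TCG_2_HOST)      -> "uint64_t"  (None-key fallback
#                                               calls _tcg_2_host)
#   transform_type("int", TCG_2_HOST)       -> "int"       (no rule matches)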
##################################################
# host -> host compatible with tcg sizes
HOST_2_TCG_COMPAT = {
"uint8_t": "uint32_t",
}
##################################################
# host/tcg -> tcg
def _host_2_tcg(type_):
if type_.startswith("TCGv"):
return type_
raise ValueError("Don't know how to translate '%s' into a TCG type\n" % type_)
HOST_2_TCG = {
"uint32_t": "TCGv_i32",
"uint64_t": "TCGv_i64",
"void *" : "TCGv_ptr",
None: _host_2_tcg,
}
##################################################
# tcg -> tcg helper definition
def _tcg_2_helper_def(type_):
if type_ == "TCGv":
return "target_ulong"
else:
return type_
TCG_2_TCG_HELPER_DEF = {
"TCGv_i32": "uint32_t",
"TCGv_i64": "uint64_t",
"TCGv_ptr": "void *",
None: _tcg_2_helper_def,
}
##################################################
# tcg -> tcg helper declaration
def _tcg_2_tcg_helper_decl_error(type_):
raise ValueError("Don't know how to translate type '%s' into a TCG helper declaration type\n" % type_)
TCG_2_TCG_HELPER_DECL = {
"TCGv" : "tl",
"TCGv_ptr": "ptr",
"TCGv_i32": "i32",
"TCGv_i64": "i64",
None: _tcg_2_tcg_helper_decl_error,
}
##################################################
# host/tcg -> tcg temporal constant allocation
def _host_2_tcg_tmp_new(type_):
if type_.startswith("TCGv"):
return "tcg_temp_new_nop"
raise ValueError("Don't know how to translate type '%s' into a TCG temporal allocation" % type_)
HOST_2_TCG_TMP_NEW = {
"uint32_t": "tcg_const_i32",
"uint64_t": "tcg_const_i64",
"void *" : "tcg_const_ptr",
None: _host_2_tcg_tmp_new,
}
##################################################
# host/tcg -> tcg temporal constant deallocation
def _host_2_tcg_tmp_free(type_):
if type_.startswith("TCGv"):
return "tcg_temp_free_nop"
raise ValueError("Don't know how to translate type '%s' into a TCG temporal deallocation" % type_)
HOST_2_TCG_TMP_FREE = {
"uint32_t": "tcg_temp_free_i32",
"uint64_t": "tcg_temp_free_i64",
"void *" : "tcg_temp_free_ptr",
None: _host_2_tcg_tmp_free,
}
| gpl-2.0 | -1,201,690,106,573,326,800 | 24.518072 | 106 | 0.54627 | false |
miqui/python-hpOneView | hpOneView/security.py | 2 | 5456 | # -*- coding: utf-8 -*-
"""
security.py
~~~~~~~~~~~~
This module implements Settings HP OneView REST API
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
__title__ = 'security'
__version__ = '0.0.1'
__copyright__ = '(C) Copyright (2012-2015) Hewlett Packard Enterprise ' \
' Development LP'
__license__ = 'MIT'
__status__ = 'Development'
###
# (C) Copyright (2012-2015) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from hpOneView.common import *
from hpOneView.connection import *
from hpOneView.activity import *
from hpOneView.exceptions import *
class security(object):
def __init__(self, con):
self._con = con
###########################################################################
# User management and Roles
###########################################################################
def get_users(self):
body = self._con.get(uri['users'])
return get_members(body)
def get_user(self, user):
body = self._con.get(uri['users'] + '/' + user)
return body
def get_user_roles(self, user):
body = self._con.get(uri['userRole'] + '/' + user)
return get_members(body)
def set_user_roles(self, user, roles):
request = []
for role in roles:
req = {'type': 'RoleNameDtoV2', 'roleName': role}
request.append(req)
task, body = self._con.put(uri['users'] + '/' + user +
'/roles?multiResource=true', request)
return body
def set_user_role(self, user, role):
request = {'type': 'RoleNameDtoV2', 'roleName': role}
task, body = self._con.put(uri['users'] + '/' + user +
'/roles?multiResource=true', [request])
return body
def create_user(self, name, password, enabled=True, fullName='',
emailAddress='', officePhone='', mobilePhone='',
roles=['Infrastructure administrator']):
usr = make_user_dict(name, password, enabled, fullName,
emailAddress, officePhone, mobilePhone,
roles)
task, body = self._con.post(uri['users'], usr)
return body
def delete_user(self, user):
task, body = self._con.delete(uri['users'] + '/' + user)
return body
def update_user(self, updateUser):
task, body = self._con.put(uri['users'], updateUser)
return body
def get_roles(self):
body = self._con.get(uri['roles'])
return get_members(body)
###########################################################################
# Certificates
###########################################################################
def get_certs(self):
body = self._con.get(uri['certificates'])
return body
def get_cert_https(self):
body = self._con.get(uri['cert-https'])
return body
def get_cert_ca(self):
body = self._con.get(uri['ca'])
return body
def get_cert_ca_crl(self):
body = self._con.get(uri['crl'])
return body
def gen_rabbitmq_internal_signed_ca(self):
request = {'type': 'RabbitMqClientCertV2', 'commonName': 'default'}
task, body = self._con.post(uri['rabbitmq'], request)
return body
def gen_rabbitmq_self_signed_ca(self):
request = {'type': 'RabbitMqClientCertV2', 'commonName': 'any',
'signedCert': False}
task, body = self._con.post(uri['rabbitmq'], request)
return body
def get_rabbitmq_kp(self, alias='default'):
body = self._con.get(uri['rabbitmq-kp'] + '/' + alias)
return body
def get_rabbitmq_ca(self, alias='default'):
body = self._con.get(uri['rabbitmq'] + '/' + alias)
return body
def get_active_user_sessions(self):
body = self._con.get(uri['activeSessions'])
return body
def get_category_actions(self):
body = self._con.get(uri['category-actions'])
return body
def get_role_category_actions(self):
body = self._con.get(uri['role-category-actions'])
return body
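    # Illustrative usage (not part of the original module), assuming an
    # authenticated hpOneView connection object:
    #
    #   con = connection('oneview.example.com')
    #   con.login({'userName': 'Administrator', 'password': 'secret'})
    #   sec = security(con)
    #   for user in sec.get_users():
    #       print(user['userName'])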
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| mit | -6,956,537,522,747,369,000 | 33.314465 | 79 | 0.586327 | false |
duncanhawthorne/robot-robot | libs/future/backports/urllib/error.py | 81 | 2715 | """Exception classes raised by urllib.
The base exception class is URLError, which inherits from IOError. It
doesn't define any behavior of its own, but is the base class for all
exceptions defined in this package.
HTTPError is an exception class that is also a valid HTTP response
instance. It behaves this way because HTTP protocol errors are valid
responses, with a status code, headers, and a body. In some contexts,
an application may want to handle an exception like a regular
response.
"""
from __future__ import absolute_import, division, unicode_literals
from future import standard_library
from future.backports.urllib import response as urllib_response
__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
# URLError is a sub-type of IOError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
# slot 0 and strerror in slot 1. This may be better than nothing.
def __init__(self, reason, filename=None):
self.args = reason,
self.reason = reason
if filename is not None:
self.filename = filename
def __str__(self):
return '<urlopen error %s>' % self.reason
class HTTPError(URLError, urllib_response.addinfourl):
"""Raised when HTTP error occurs, but also acts like non-error return"""
__super_init = urllib_response.addinfourl.__init__
def __init__(self, url, code, msg, hdrs, fp):
self.code = code
self.msg = msg
self.hdrs = hdrs
self.fp = fp
self.filename = url
# The addinfourl classes depend on fp being a valid file
# object. In some cases, the HTTPError may not have a valid
# file object. If this happens, the simplest workaround is to
# not initialize the base classes.
if fp is not None:
self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
# since URLError specifies a .reason attribute, HTTPError should also
# provide this attribute. See issue13211 for discussion.
@property
def reason(self):
return self.msg
def info(self):
return self.hdrs
# exception raised when downloaded size does not match content-length
class ContentTooShortError(URLError):
def __init__(self, message, content):
URLError.__init__(self, message)
self.content = content
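# Illustrative (not in the original file): because HTTPError is also a valid
# response object, callers can read the error body:
#
#   try:
#       resp = urlopen('http://example.com/missing')
#   except HTTPError as e:
#       print(e.code, e.reason)
#       body = e.read()  # works because HTTPError wraps the response fp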
| mit | 2,623,764,460,675,288,000 | 35.2 | 76 | 0.682873 | false |
TiVoMaker/boto | boto/file/bucket.py | 153 | 4085 | # Copyright 2010 Google Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# File representation of bucket, for use with "file://" URIs.
import os
from boto.file.key import Key
from boto.file.simpleresultset import SimpleResultSet
from boto.s3.bucketlistresultset import BucketListResultSet
class Bucket(object):
def __init__(self, name, contained_key):
"""Instantiate an anonymous file-based Bucket around a single key.
"""
self.name = name
self.contained_key = contained_key
def __iter__(self):
return iter(BucketListResultSet(self))
def __str__(self):
return 'anonymous bucket for file://' + self.contained_key
def delete_key(self, key_name, headers=None,
version_id=None, mfa_token=None):
"""
Deletes a key from the bucket.
:type key_name: string
:param key_name: The key name to delete
:type version_id: string
:param version_id: Unused in this subclass.
:type mfa_token: tuple or list of strings
:param mfa_token: Unused in this subclass.
"""
os.remove(key_name)
def get_all_keys(self, headers=None, **params):
"""
This method returns the single key around which this anonymous Bucket
was instantiated.
:rtype: SimpleResultSet
        :return: The result of listing the requested keys on the file system
"""
key = Key(self.name, self.contained_key)
return SimpleResultSet([key])
def get_key(self, key_name, headers=None, version_id=None,
key_type=Key.KEY_REGULAR_FILE):
"""
Check to see if a particular key exists within the bucket.
Returns: An instance of a Key object or None
:type key_name: string
:param key_name: The name of the key to retrieve
:type version_id: string
:param version_id: Unused in this subclass.
        :type key_type: integer
        :param key_type: Type of the Key - Regular File or input/output Stream
:rtype: :class:`boto.file.key.Key`
:returns: A Key object from this bucket.
"""
if key_name == '-':
return Key(self.name, '-', key_type=Key.KEY_STREAM_READABLE)
else:
fp = open(key_name, 'rb')
return Key(self.name, key_name, fp)
def new_key(self, key_name=None, key_type=Key.KEY_REGULAR_FILE):
"""
Creates a new key
:type key_name: string
:param key_name: The name of the key to create
:rtype: :class:`boto.file.key.Key`
:returns: An instance of the newly created key object
"""
if key_name == '-':
return Key(self.name, '-', key_type=Key.KEY_STREAM_WRITABLE)
else:
dir_name = os.path.dirname(key_name)
if dir_name and not os.path.exists(dir_name):
os.makedirs(dir_name)
fp = open(key_name, 'wb')
return Key(self.name, key_name, fp)
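# A minimal usage sketch (illustrative addition, not part of boto): the
# special key name '-' maps keys onto stdin/stdout streams, while ordinary
# names open local files. The path below is only a placeholder.
if __name__ == '__main__':
    bucket = Bucket('localfs', '/tmp/example.txt')
    print(str(bucket))                # anonymous bucket for file:///tmp/example.txt
    writable = bucket.new_key('-')    # Key wrapping a writable stream (stdout)
    readable = bucket.get_key('-')    # Key wrapping a readable stream (stdin)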
| mit | 5,406,911,787,228,197,000 | 35.473214 | 81 | 0.640147 | false |
nickromano/django-slow-tests | _examples/django18/mysite/settings.py | 2 | 2798 | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'vikmt@b6)=_z^3a3ji%2&#znmz)ure%k7xrz@phly(0#&as84z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# Custom test runner
TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner'
NUM_SLOW_TESTS = 5
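# Example invocation (assuming the standard Django test workflow; the exact
# report format comes from django-slowtests):
#
#     python manage.py test polls
#
# After the suite finishes, the runner above reports the NUM_SLOW_TESTS
# slowest tests with their timings.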
| mit | -1,660,231,314,956,478,000 | 25.149533 | 71 | 0.694782 | false |
qgis/QGIS-Django | qgis-app/plugins/tests/HelloWorld/1.8-author-slashes-error/HelloWorld/HelloWorld.py | 20 | 1094 | # -*- coding: utf-8 -*-
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
class HelloWorld:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
self.canvas = iface.mapCanvas()
def initGui(self):
# Create action that will start plugin
self.action = QAction(QIcon(":/plugins/"), "&HelloWorld", self.iface.mainWindow())
# connect the action to the run method
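        # Note: "activated()" is the legacy signal name here; on a Qt4
        # QAction the usual signal is "triggered()", e.g.
        # QObject.connect(self.action, SIGNAL("triggered()"), self.hello_world).
        # Kept as-is since this file is a plugin-validation test fixture.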
QObject.connect(self.action, SIGNAL("activated()"), self.hello_world)
# Add toolbar button and menu item
self.iface.addPluginToMenu("HelloWorld", self.action)
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu("HelloWorld",self.action)
# run
def hello_world(self):
        QMessageBox.information(
            self.iface.mainWindow(),
            QCoreApplication.translate('HelloWorld', "HelloWorld"),
            QCoreApplication.translate('HelloWorld', "HelloWorld"))
return
if __name__ == "__main__":
pass
| gpl-2.0 | -8,981,936,775,292,599,000 | 26.35 | 168 | 0.652651 | false |
mxm/incubator-beam | sdks/python/apache_beam/runners/direct/direct_metrics_test.py | 7 | 9689 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import unittest
import hamcrest as hc
from apache_beam.metrics.cells import DistributionData
from apache_beam.metrics.cells import DistributionResult
from apache_beam.metrics.execution import MetricKey
from apache_beam.metrics.execution import MetricResult
from apache_beam.metrics.execution import MetricUpdates
from apache_beam.metrics.metricbase import MetricName
from apache_beam.runners.direct.direct_metrics import DirectMetrics
class DirectMetricsTest(unittest.TestCase):
name1 = MetricName('namespace1', 'name1')
name2 = MetricName('namespace1', 'name2')
name3 = MetricName('namespace2', 'name1')
bundle1 = object() # For this test, any object can be a bundle
bundle2 = object()
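  # Note on positional fields (inferred from the assertions below): a
  # DistributionData reads as (sum, count, min, max), so merging
  # DistributionData(8, 2, 3, 5) with DistributionData(4, 1, 4, 4) yields
  # DistributionData(12, 3, 3, 5).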
def test_combiner_functions(self):
metrics = DirectMetrics()
counter = metrics._counters['anykey']
counter.commit_logical(self.bundle1, 5)
self.assertEqual(counter.extract_committed(), 5)
with self.assertRaises(TypeError):
counter.commit_logical(self.bundle1, None)
distribution = metrics._distributions['anykey']
distribution.commit_logical(self.bundle1, DistributionData(4, 1, 4, 4))
self.assertEqual(distribution.extract_committed(),
DistributionResult(DistributionData(4, 1, 4, 4)))
with self.assertRaises(AttributeError):
distribution.commit_logical(self.bundle1, None)
def test_commit_logical_no_filter(self):
metrics = DirectMetrics()
metrics.commit_logical(
self.bundle1,
MetricUpdates(
counters={MetricKey('step1', self.name1): 5,
MetricKey('step1', self.name2): 8},
distributions={
MetricKey('step1', self.name1): DistributionData(8, 2, 3, 5)}))
metrics.commit_logical(
self.bundle1,
MetricUpdates(
counters={MetricKey('step2', self.name1): 7,
MetricKey('step1', self.name2): 4},
distributions={
MetricKey('step1', self.name1): DistributionData(4, 1, 4, 4)}))
results = metrics.query()
hc.assert_that(
results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name2), 12, 0),
MetricResult(MetricKey('step2', self.name1), 7, 0),
MetricResult(MetricKey('step1', self.name1), 5, 0)]))
hc.assert_that(
results['distributions'],
hc.contains_inanyorder(
MetricResult(MetricKey('step1', self.name1),
DistributionResult(
DistributionData(12, 3, 3, 5)),
DistributionResult(
DistributionData(0, 0, None, None)))))
def test_apply_physical_no_filter(self):
metrics = DirectMetrics()
metrics.update_physical(object(),
MetricUpdates(
counters={MetricKey('step1', self.name1): 5,
MetricKey('step1', self.name3): 8}))
metrics.update_physical(object(),
MetricUpdates(
counters={MetricKey('step2', self.name1): 7,
MetricKey('step1', self.name3): 4}))
results = metrics.query()
hc.assert_that(results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name1), 0, 5),
MetricResult(MetricKey('step1', self.name3), 0, 12),
MetricResult(MetricKey('step2', self.name1), 0, 7)]))
metrics.commit_physical(object(), MetricUpdates())
results = metrics.query()
hc.assert_that(results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name1), 0, 5),
MetricResult(MetricKey('step1', self.name3), 0, 12),
MetricResult(MetricKey('step2', self.name1), 0, 7)]))
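  # Reading the MetricResult values in the test below (inferred from its
  # assertions): the second positional argument is the committed value and
  # the third is the attempted value, so physical updates/commits move the
  # attempted column while commit_logical fills in the committed one.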
def test_apply_physical_logical(self):
metrics = DirectMetrics()
dist_zero = DistributionData(0, 0, None, None)
metrics.update_physical(
object(),
MetricUpdates(
counters={MetricKey('step1', self.name1): 7,
MetricKey('step1', self.name2): 5,
MetricKey('step2', self.name1): 1},
distributions={MetricKey('step1', self.name1):
DistributionData(3, 1, 3, 3),
MetricKey('step2', self.name3):
DistributionData(8, 2, 4, 4)}))
results = metrics.query()
hc.assert_that(results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name1), 0, 7),
MetricResult(MetricKey('step1', self.name2), 0, 5),
MetricResult(MetricKey('step2', self.name1), 0, 1)]))
hc.assert_that(results['distributions'],
hc.contains_inanyorder(*[
MetricResult(
MetricKey('step1', self.name1),
DistributionResult(dist_zero),
DistributionResult(DistributionData(3, 1, 3, 3))),
MetricResult(
MetricKey('step2', self.name3),
DistributionResult(dist_zero),
DistributionResult(DistributionData(8, 2, 4, 4)))]))
metrics.commit_physical(
object(),
MetricUpdates(
counters={MetricKey('step1', self.name1): -3,
MetricKey('step2', self.name1): -5},
distributions={MetricKey('step1', self.name1):
DistributionData(8, 4, 1, 5),
MetricKey('step2', self.name2):
DistributionData(8, 8, 1, 1)}))
results = metrics.query()
hc.assert_that(results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name1), 0, 4),
MetricResult(MetricKey('step1', self.name2), 0, 5),
MetricResult(MetricKey('step2', self.name1), 0, -4)]))
hc.assert_that(results['distributions'],
hc.contains_inanyorder(*[
MetricResult(
MetricKey('step1', self.name1),
DistributionResult(dist_zero),
DistributionResult(DistributionData(11, 5, 1, 5))),
MetricResult(
MetricKey('step2', self.name3),
DistributionResult(dist_zero),
DistributionResult(DistributionData(8, 2, 4, 4))),
MetricResult(
MetricKey('step2', self.name2),
DistributionResult(dist_zero),
DistributionResult(DistributionData(8, 8, 1, 1)))]))
metrics.commit_logical(
object(),
MetricUpdates(
counters={MetricKey('step1', self.name1): 3,
MetricKey('step1', self.name2): 5,
MetricKey('step2', self.name1): -3},
distributions={MetricKey('step1', self.name1):
DistributionData(11, 5, 1, 5),
MetricKey('step2', self.name2):
DistributionData(8, 8, 1, 1),
MetricKey('step2', self.name3):
DistributionData(4, 1, 4, 4)}))
results = metrics.query()
hc.assert_that(results['counters'],
hc.contains_inanyorder(*[
MetricResult(MetricKey('step1', self.name1), 3, 4),
MetricResult(MetricKey('step1', self.name2), 5, 5),
MetricResult(MetricKey('step2', self.name1), -3, -4)]))
hc.assert_that(results['distributions'],
hc.contains_inanyorder(*[
MetricResult(
MetricKey('step1', self.name1),
DistributionResult(DistributionData(11, 5, 1, 5)),
DistributionResult(DistributionData(11, 5, 1, 5))),
MetricResult(
MetricKey('step2', self.name3),
DistributionResult(DistributionData(4, 1, 4, 4)),
DistributionResult(DistributionData(8, 2, 4, 4))),
MetricResult(
MetricKey('step2', self.name2),
DistributionResult(DistributionData(8, 8, 1, 1)),
DistributionResult(DistributionData(8, 8, 1, 1)))]))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 2,083,599,663,281,380,000 | 44.488263 | 79 | 0.548457 | false |
ScoutAlarm/node-sodium-linux | node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 778 | 65880 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
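    # (Comparing sorted lists keeps the check order-insensitive; the
    # validators do not guarantee any particular warning order.)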
def testValidateMSVSSettings_tool_names(self):
"""Tests that only MSVS tool names are allowed."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {},
'VCLinkerTool': {},
'VCMIDLTool': {},
'foo': {},
'VCResourceCompilerTool': {},
'VCLibrarianTool': {},
'VCManifestTool': {},
'ClCompile': {}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized tool foo',
'Warning: unrecognized tool ClCompile'])
def testValidateMSVSSettings_settings(self):
"""Tests that for invalid MSVS settings."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '5',
'BrowseInformation': 'fdkslj',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '-1',
'CompileAs': '1',
'DebugInformationFormat': '2',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': '1',
'ExceptionHandling': '1',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '1',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '1',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'string1;string2',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '1',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '1',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '2',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'CLRImageType': '2',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '2',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': '2',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'ErrorReporting': '2',
'FixedBaseAddress': '2',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '2',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '2',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '2',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '2',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '2',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'true',
'Version': 'a string1'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'CPreprocessOptions': 'a string1',
'DefaultCharType': '1',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '1',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'notgood': 'bogus',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'VCResourceCompilerTool': {
'AdditionalOptions': 'a string1',
'AdditionalIncludeDirectories': 'folder1;folder2',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'notgood2': 'bogus',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a string1',
'ManifestResourceFile': 'a_file_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'truel',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}},
self.stderr)
self._ExpectedWarnings([
'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
'index value (5) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/BrowseInformation, '
"invalid literal for int() with base 10: 'fdkslj'",
'Warning: for VCCLCompilerTool/CallingConvention, '
'index value (-1) not in expected range [0, 3)',
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
'Warning: for VCLinkerTool/TargetMachine, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCMIDLTool/notgood',
'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
'Warning: for VCManifestTool/UpdateFileHashes, '
"expected bool; got 'truel'"
''])
def testValidateMSBuildSettings_settings(self):
"""Tests that for invalid MSBuild settings."""
MSVSSettings.ValidateMSBuildSettings(
{'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'false',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'BuildingInIDE': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'CompileAsManaged': 'Pure',
'CreateHotpatchableImage': 'true',
'DebugInformationFormat': 'ProgramDatabase',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'SyncCThrow',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Precise',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'FunctionLevelLinking': 'false',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'false',
'MinimalRebuild': 'true',
'MultiProcessorCompilation': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Disabled',
'PrecompiledHeader': 'NotUsing',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'string1;string2',
'PreprocessOutputPath': 'a string1',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'false',
'ProcessorNumber': '33',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TreatSpecificWarningsAsErrors': 'string1;string2',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UseUnicodeForAssemblerListing': 'true',
'WarningLevel': 'TurnOffAllWarnings',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'Link': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'BuildingInIDE': 'true',
'CLRImageType': 'ForceIJWImage',
'CLRSupportLastError': 'Enabled',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'CreateHotPatchableImage': 'X86Image',
'DataExecutionPrevention': 'false',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': 'NotSet',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'FixedBaseAddress': 'false',
'ForceFileOutput': 'Enabled',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'a_file_list',
'ImageHasSafeExceptionHandlers': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'false',
'LinkDLL': 'true',
'LinkErrorReporting': 'SendErrorReport',
'LinkStatus': 'true',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'MSDOSStubFileName': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': 'false',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'PreventDllBinding': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SectionAlignment': '33',
'SetChecksum': 'true',
'ShowProgress': 'LinkVerboseREF',
'SpecifySectionAttributes': 'a string1',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Console',
'SupportNobindOfDelayLoadedDLL': 'true',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TrackerLogDirectory': 'a_folder',
'TreatLinkerWarningAsErrors': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'AsInvoker',
'UACUIAccess': 'true',
'Version': 'a string1'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'Culture': '0x236',
'IgnoreStandardIncludePath': 'true',
'NullTerminateStrings': 'true',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ApplicationConfigurationMode': 'true',
'ClientStubFile': 'a_file_name',
'CPreprocessOptions': 'a string1',
'DefaultCharType': 'Signed',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'EnableCustom',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateClientFiles': 'Stub',
'GenerateServerFiles': 'None',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'LocaleID': '33',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'ServerStubFile': 'a_file_name',
'StructMemberAlignment': 'NotSet',
'SuppressCompilerWarnings': 'true',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Itanium',
'TrackerLogDirectory': 'a_folder',
'TypeLibFormat': 'NewFormat',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'Lib': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'DisplayLibrary': 'a string1',
'ErrorReporting': 'PromptImmediately',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkTimeCodeGeneration': 'true',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'Name': 'a_file_name',
'OutputFile': 'a_file_name',
'RemoveObjects': 'file1;file2',
'SubSystem': 'Console',
'SuppressStartupBanner': 'true',
'TargetMachine': 'MachineX86i',
'TrackerLogDirectory': 'a_folder',
'TreatLibWarningAsErrors': 'true',
'UseUnicodeResponseFiles': 'true',
'Verbose': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'EnableDPIAwareness': 'fal',
'GenerateCatalogFiles': 'truel',
'GenerateCategoryTags': 'true',
'InputResourceManifests': 'a string1',
'ManifestFromManagedAssembly': 'a_file_name',
'notgood3': 'bogus',
'OutputManifestFile': 'a_file_name',
'OutputResourceManifests': 'a string1',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressDependencyElement': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'a_file_name'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized setting ClCompile/Enableprefast',
'Warning: unrecognized setting ClCompile/ZZXYZ',
'Warning: unrecognized setting Manifest/notgood3',
'Warning: for Manifest/GenerateCatalogFiles, '
"expected bool; got 'truel'",
'Warning: for Lib/TargetMachine, unrecognized enumerated value '
'MachineX86i',
"Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
"""Tests an empty conversion."""
msvs_settings = {}
expected_msbuild_settings = {}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
"""Tests a minimal conversion."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': '0',
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': '1',
'ErrorReporting': '1',
'DataExecutionPrevention': '2',
},
}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': 'Default',
},
'Link': {
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'LinkErrorReporting': 'PromptImmediately',
'DataExecutionPrevention': 'true',
},
}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_warnings(self):
"""Tests conversion that generates warnings."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2',
# These are incorrect values:
'BasicRuntimeChecks': '12',
'BrowseInformation': '21',
'UsePrecompiledHeader': '13',
'GeneratePreprocessedFile': '14'},
'VCLinkerTool': {
# These are incorrect values:
'Driver': '10',
'LinkTimeCodeGeneration': '31',
'ErrorReporting': '21',
'FixedBaseAddress': '6'},
'VCResourceCompilerTool': {
# Custom
'Culture': '1003'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2'},
'Link': {},
'ResourceCompile': {
# Custom
'Culture': '0x03eb'}}
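    # (1003 decimal == 0x3EB, so the Culture conversion just re-renders the
    # MSVS decimal locale ID in MSBuild's hexadecimal form.)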
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([
'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
'MSBuild, index value (12) not in expected range [0, 4)',
'Warning: while converting VCCLCompilerTool/BrowseInformation to '
'MSBuild, index value (21) not in expected range [0, 3)',
'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
'MSBuild, index value (13) not in expected range [0, 3)',
'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
'MSBuild, value must be one of [0, 1, 2]; got 14',
'Warning: while converting VCLinkerTool/Driver to '
'MSBuild, index value (10) not in expected range [0, 4)',
'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
'MSBuild, index value (31) not in expected range [0, 5)',
'Warning: while converting VCLinkerTool/ErrorReporting to '
'MSBuild, index value (21) not in expected range [0, 3)',
'Warning: while converting VCLinkerTool/FixedBaseAddress to '
'MSBuild, index value (6) not in expected range [0, 3)',
])
def testConvertToMSBuildSettings_full_synthetic(self):
"""Tests conversion of all the MSBuild settings."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '1',
'BrowseInformation': '2',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '0',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': '0',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '1',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '0',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '2',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '0',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '0',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': '1',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': '1',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '0',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'ErrorReporting': '0',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2;file3',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '1',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '0',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '0',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '3',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '1',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'false',
'UseUnicodeResponseFiles': 'true',
'Version': 'a_string'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': '0',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '2',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'EmbedManifest': 'true',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'ManifestResourceFile': 'my_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string /J',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'true',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': 'NotSet',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'AnySuitable',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'Create',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'WarningLevel': 'Level2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'Link': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': 'ForceIJWImage',
'CLRThreadAttribute': 'STAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': 'Driver',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'NoErrorReport',
'LinkTimeCodeGeneration': 'PGInstrument',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': '',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'true',
'ShowProgress': 'NotSet',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Windows',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineARM',
'TerminalServerAware': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'HighestAvailable',
'UACUIAccess': 'true',
'Version': 'a_string'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '0x03eb',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': 'Unsigned',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'All',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '4',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Win32',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'Lib': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'my_name'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'false'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_actual(self):
"""Tests the conversion of an actual project.
A VS2008 project with most of the options defined was created through the
VS2008 IDE. It was then converted to VS2010. The tool settings found in
the .vcproj and .vcxproj files were converted to the two dictionaries
msvs_settings and expected_msbuild_settings.
Note that for many settings, the VS2010 converter adds macros like
    %(AdditionalIncludeDirectories) to make sure that inherited values are
included. Since the Gyp projects we generate do not use inheritance,
we removed these macros. They were:
ClCompile:
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
AdditionalOptions: ' %(AdditionalOptions)'
AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
ForcedUsingFiles: ';%(ForcedUsingFiles)',
PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
UndefinePreprocessorDefinitions:
';%(UndefinePreprocessorDefinitions)',
Link:
AdditionalDependencies: ';%(AdditionalDependencies)',
AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
AdditionalManifestDependencies:
';%(AdditionalManifestDependencies)',
AdditionalOptions: ' %(AdditionalOptions)',
AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
AssemblyLinkResource: ';%(AssemblyLinkResource)',
DelayLoadDLLs: ';%(DelayLoadDLLs)',
EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
ForceSymbolReferences: ';%(ForceSymbolReferences)',
IgnoreSpecificDefaultLibraries:
';%(IgnoreSpecificDefaultLibraries)',
ResourceCompile:
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
AdditionalOptions: ' %(AdditionalOptions)',
PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
Manifest:
AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
AdditionalOptions: ' %(AdditionalOptions)',
InputResourceManifests: ';%(InputResourceManifests)',
"""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/more',
'AdditionalUsingDirectories': 'test',
'AssemblerListingLocation': '$(IntDir)\\a',
'AssemblerOutput': '1',
'BasicRuntimeChecks': '3',
'BrowseInformation': '1',
'BrowseInformationFile': '$(IntDir)\\e',
'BufferSecurityCheck': 'false',
'CallingConvention': '1',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'abc',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '2',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '2',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'false',
'ForcedIncludeFiles': 'def',
'ForcedUsingFiles': 'ge',
'GeneratePreprocessedFile': '2',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': '$(IntDir)\\b',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
'PrecompiledHeaderThrough': 'StdAfx.hd',
'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
'RuntimeLibrary': '3',
'RuntimeTypeInfo': 'false',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'false',
'TreatWChar_tAsBuiltInType': 'false',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'wer',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '0',
'UseUnicodeResponseFiles': 'false',
'WarnAsError': 'true',
'WarningLevel': '3',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': '$(IntDir)\\c'},
'VCLinkerTool': {
'AdditionalDependencies': 'zx',
'AdditionalLibraryDirectories': 'asd',
'AdditionalManifestDependencies': 's2',
'AdditionalOptions': '/mor2',
'AddModuleNamesToAssembly': 'd1',
'AllowIsolation': 'false',
'AssemblyDebug': '1',
'AssemblyLinkResource': 'd5',
'BaseAddress': '23423',
'CLRImageType': '3',
'CLRThreadAttribute': '1',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'd4',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'd2',
'EnableCOMDATFolding': '1',
'EnableUAC': 'false',
'EntryPointSymbol': 'f5',
'ErrorReporting': '2',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'd3',
'FunctionOrder': 'fssdfsd',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'false',
'GenerateMapFile': 'true',
'HeapCommitSize': '13',
'HeapReserveSize': '12',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'flob;flok',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'f4',
'KeyContainer': 'f7',
'KeyFile': 'f6',
'LargeAddressAware': '2',
'LinkIncremental': '0',
'LinkLibraryDependencies': 'false',
'LinkTimeCodeGeneration': '1',
'ManifestFile':
'$(IntDir)\\$(TargetFileName).2intermediate.manifest',
'MapExports': 'true',
'MapFileName': 'd5',
'MergedIDLBaseFileName': 'f2',
'MergeSections': 'f5',
'MidlCommandFile': 'f1',
'ModuleDefinitionFile': 'sdsd',
'OptimizeForWindows98': '2',
'OptimizeReferences': '2',
'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
'ProgramDatabaseFile': 'Flob.pdb',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'false',
'ShowProgress': '1',
'StackCommitSize': '15',
'StackReserveSize': '14',
'StripPrivateSymbols': 'd3',
'SubSystem': '1',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'false',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '1',
'TerminalServerAware': '1',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'f3',
'TypeLibraryResourceID': '12',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'false',
'Version': '333'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'f3',
'AdditionalOptions': '/more3',
'Culture': '3084',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': '_UNICODE;UNICODE2',
'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
'ShowProgress': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'sfsdfsd',
'AdditionalOptions': 'afdsdafsd',
'AssemblyIdentity': 'sddfdsadfsa',
'ComponentFileName': 'fsdfds',
'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
'EmbedManifest': 'false',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'asfsfdafs',
'ManifestResourceFile':
'$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
'OutputManifestFile': '$(TargetPath).manifestdfs',
'RegistrarScriptFile': 'sdfsfd',
'ReplacementsFile': 'sdffsd',
'SuppressStartupBanner': 'false',
'TypeLibraryFile': 'sfsd',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'sfsd',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'false',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/more /J',
'AdditionalUsingDirectories': 'test',
'AssemblerListingLocation': '$(IntDir)a',
'AssemblerOutput': 'AssemblyCode',
'BasicRuntimeChecks': 'EnableFastChecks',
'BrowseInformation': 'true',
'BrowseInformationFile': '$(IntDir)e',
'BufferSecurityCheck': 'false',
'CallingConvention': 'FastCall',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'abc',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Queue',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Size',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'false',
'ForcedIncludeFiles': 'def',
'ForcedUsingFiles': 'ge',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': '$(IntDir)b',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
'PrecompiledHeaderFile': 'StdAfx.hd',
'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
'PreprocessSuppressLineNumbers': 'true',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
'RuntimeLibrary': 'MultiThreadedDebugDLL',
'RuntimeTypeInfo': 'false',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '4Bytes',
'SuppressStartupBanner': 'false',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'false',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'wer',
'UseFullPaths': 'true',
'WarningLevel': 'Level3',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': '$(IntDir)c'},
'Link': {
'AdditionalDependencies': 'zx',
'AdditionalLibraryDirectories': 'asd',
'AdditionalManifestDependencies': 's2',
'AdditionalOptions': '/mor2',
'AddModuleNamesToAssembly': 'd1',
'AllowIsolation': 'false',
'AssemblyDebug': 'true',
'AssemblyLinkResource': 'd5',
'BaseAddress': '23423',
'CLRImageType': 'ForceSafeILImage',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'd4',
'DelaySign': 'true',
'Driver': 'UpOnly',
'EmbedManagedResourceFile': 'd2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'false',
'EntryPointSymbol': 'f5',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'd3',
'FunctionOrder': 'fssdfsd',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': '13',
'HeapReserveSize': '12',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'flob;flok',
'ImportLibrary': 'f4',
'KeyContainer': 'f7',
'KeyFile': 'f6',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'QueueForNextLogin',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
'MapExports': 'true',
'MapFileName': 'd5',
'MergedIDLBaseFileName': 'f2',
'MergeSections': 'f5',
'MidlCommandFile': 'f1',
'ModuleDefinitionFile': 'sdsd',
'NoEntryPoint': 'true',
'OptimizeReferences': 'true',
'OutputFile': '$(OutDir)$(ProjectName)2.exe',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
'ProgramDatabaseFile': 'Flob.pdb',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'false',
'ShowProgress': 'LinkVerbose',
'StackCommitSize': '15',
'StackReserveSize': '14',
'StripPrivateSymbols': 'd3',
'SubSystem': 'Console',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'false',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'f3',
'TypeLibraryResourceID': '12',
'UACExecutionLevel': 'RequireAdministrator',
'UACUIAccess': 'true',
'Version': '333'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'f3',
'AdditionalOptions': '/more3',
'Culture': '0x0c0c',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': '_UNICODE;UNICODE2',
'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
'ShowProgress': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'sfsdfsd',
'AdditionalOptions': 'afdsdafsd',
'AssemblyIdentity': 'sddfdsadfsa',
'ComponentFileName': 'fsdfds',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'asfsfdafs',
'OutputManifestFile': '$(TargetPath).manifestdfs',
'RegistrarScriptFile': 'sdfsfd',
'ReplacementsFile': 'sdffsd',
'SuppressStartupBanner': 'false',
'TypeLibraryFile': 'sfsd',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'sfsd',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'false',
'UseLibraryDependencyInputs': 'true'},
'': {
'EmbedManifest': 'false',
'GenerateManifest': 'false',
'IgnoreImportLibrary': 'true',
'LinkIncremental': ''
},
'ManifestResourceCompile': {
'ResourceOutputFileName':
'$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
}
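    # Converting the MSVS settings above must produce exactly this MSBuild
    # dictionary, and the conversion should emit no warnings.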
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
if __name__ == '__main__':
unittest.main()
| mit | -5,675,824,001,713,000,000 | 43.453441 | 81 | 0.555935 | false |
ppanczyk/ansible | lib/ansible/module_utils/avi.py | 20 | 3790 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Gaurav Rastogi <[email protected]>, 2017
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This module initially matched the namespace of network module avi. However,
# that causes namespace import error when other modules from avi namespaces
# are imported. Added import of absolute_import to avoid import collisions for
# avi.sdk.
from __future__ import absolute_import
import os
from distutils.version import LooseVersion
HAS_AVI = True
try:
import avi.sdk
sdk_version = getattr(avi.sdk, '__version__', None)
if ((sdk_version is None) or (sdk_version and (LooseVersion(sdk_version) < LooseVersion('17.1')))):
# It allows the __version__ to be '' as that value is used in development builds
raise ImportError
from avi.sdk.utils.ansible_utils import avi_ansible_api
except ImportError:
HAS_AVI = False
def avi_common_argument_spec():
"""
Returns common arguments for all Avi modules
:return: dict
"""
return dict(
controller=dict(default=os.environ.get('AVI_CONTROLLER', '')),
username=dict(default=os.environ.get('AVI_USERNAME', '')),
password=dict(default=os.environ.get('AVI_PASSWORD', ''), no_log=True),
tenant=dict(default='admin'),
tenant_uuid=dict(default=''),
api_version=dict(default='16.4'))
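# Illustrative usage (not part of this module): an Avi module would typically
# extend this spec before constructing its AnsibleModule, e.g.
#   argument_spec = avi_common_argument_spec()
#   argument_spec.update(state=dict(default='present',
#                                   choices=['absent', 'present']))
#   module = AnsibleModule(argument_spec=argument_spec)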
def ansible_return(module, rsp, changed, req=None, existing_obj=None):
"""
Helper function to return the right ansible return based on the error code and
changed status.
:param module: AnsibleModule
:param rsp: ApiResponse object returned from ApiSession.
:param changed: Whether something changed in this module.
:param req: Dict data for Avi API call.
:param existing_obj: Dict representing current HTTP resource in Avi Controller.
Returns: specific ansible module exit function
"""
if rsp.status_code > 299:
return module.fail_json(msg='Error %d Msg %s req: %s' % (
rsp.status_code, rsp.text, req))
if changed and existing_obj:
return module.exit_json(
changed=changed, obj=rsp.json(), old_obj=existing_obj)
return module.exit_json(changed=changed, obj=rsp.json())
| gpl-3.0 | 2,332,240,151,232,863,000 | 45.219512 | 103 | 0.727441 | false |
hesam-setareh/nest-simulator | pynest/nest/tests/test_connect_pairwise_bernoulli.py | 4 | 3399 | # -*- coding: utf-8 -*-
#
# test_connect_pairwise_bernoulli.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import unittest
import scipy.stats
from . import test_connect_helpers as hf
from .test_connect_parameters import TestParams
class TestPairwiseBernoulli(TestParams):
# specify connection pattern and specific params
rule = 'pairwise_bernoulli'
p = 0.5
conn_dict = {'rule': rule, 'p': p}
# sizes of source-, target-population and connection probability for
# statistical test
N_s = 50
N_t = 50
# Critical values and number of iterations of two level test
stat_dict = {'alpha2': 0.05, 'n_runs': 20}
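    # Two-level test: a chi-squared goodness-of-fit test on the degree
    # distribution of each run, followed by a Kolmogorov-Smirnov test that
    # the per-run p-values are uniform on [0, 1].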
def testStatistics(self):
for fan in ['in', 'out']:
expected = hf.get_expected_degrees_bernoulli(
self.p, fan, self.N_s, self.N_t)
pvalues = []
for i in range(self.stat_dict['n_runs']):
hf.reset_seed(i, self.nr_threads)
self.setUpNetwork(conn_dict=self.conn_dict,
N1=self.N_s, N2=self.N_t)
degrees = hf.get_degrees(fan, self.pop1, self.pop2)
degrees = hf.gather_data(degrees)
# degrees = self.comm.gather(degrees, root=0)
# if self.rank == 0:
if degrees is not None:
chi, p = hf.chi_squared_check(degrees, expected, self.rule)
pvalues.append(p)
hf.mpi_barrier()
if degrees is not None:
ks, p = scipy.stats.kstest(pvalues, 'uniform')
self.assertTrue(p > self.stat_dict['alpha2'])
def testAutapses(self):
conn_params = self.conn_dict.copy()
N = 10
conn_params['multapses'] = False
# test that autapses exist
conn_params['p'] = 1.
conn_params['autapses'] = True
pop = hf.nest.Create('iaf_psc_alpha', N)
hf.nest.Connect(pop, pop, conn_params)
        # make sure all autapses do exist
M = hf.get_connectivity_matrix(pop, pop)
hf.mpi_assert(np.diag(M), np.ones(N), self)
hf.nest.ResetKernel()
# test that autapses were excluded
conn_params['p'] = 1.
conn_params['autapses'] = False
pop = hf.nest.Create('iaf_psc_alpha', N)
hf.nest.Connect(pop, pop, conn_params)
        # make sure no autapses exist
M = hf.get_connectivity_matrix(pop, pop)
hf.mpi_assert(np.diag(M), np.zeros(N), self)
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(TestPairwiseBernoulli)
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == '__main__':
run()
| gpl-2.0 | 5,665,089,324,312,129,000 | 32.653465 | 79 | 0.61165 | false |
clubcapra/ngv_dev | CALIBRATION_0/camera_calibration/gui_cam_calibration.py | 1 | 6603 | from PySide.QtGui import *
import timeit
#import camera
import cv2
import os
import sys
import numpy as np
def generate_crop_indexes_3d(width, height, crop_width, crop_height):
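    """Return, for every crop_width x crop_height tile of a row-major
    width x height image, a (crop_height, crop_width) array of flat pixel
    indexes. Tiles are enumerated left-to-right, top-to-bottom.
    """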
idxs = []
for row in xrange(0, height, crop_height):
for col in xrange(0, width / crop_width):
indexes = []
for c in xrange(crop_height):
                indexes.append(range(c * width, c * width + crop_width))  # each crop row spans crop_width pixels
idxs.append(np.add(indexes, col * crop_width + row * width))
return np.asarray(idxs, dtype=np.int64)
def generate_crop_pixels_indexes_3d(width, height, crop_width, crop_height):
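    """Like generate_crop_indexes_3d, but expands each pixel index p into its
    three channel offsets (3p, 3p + 1, 3p + 2) for a flat interleaved RGB
    buffer.
    """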
def channel_indexes(pixel_coord):
channel_coord = long(pixel_coord) * 3
return range(channel_coord, channel_coord + 3)
crop_indexes = generate_crop_indexes_3d(width, height, crop_width, crop_height)
pixel_channel_indexes = np.zeros(crop_indexes.shape + (3,),
dtype=np.int64)
for i in xrange(len(crop_indexes)):
for j in xrange(len(crop_indexes[0])):
for k in xrange(len(crop_indexes[0][0])):
pixel_channel_indexes[i][j][k] = \
channel_indexes(crop_indexes[i][j][k])
return np.int64(pixel_channel_indexes)
def generate_indexes(width, height, crop_width, crop_height):
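    """Vectorised NumPy variant of generate_crop_pixels_indexes_3d. Note the
    reshape below hardcodes 2304 tiles of 20x20, i.e. a 1280x720 image.
    """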
indexes = np.arange(width * height).reshape((height, width))
row_count = height / crop_height
col_count = width / crop_width
pixel_indexes = []
for row in xrange(row_count):
for col in xrange(col_count):
pixel_indexes.append(np.asarray(indexes[row * crop_height:(row + 1) * crop_height,
col * crop_width:(col + 1) * crop_width]))
pixel_indexes = np.asarray(pixel_indexes).reshape((2304, 20, 20, 1))
return np.concatenate((pixel_indexes * 3, pixel_indexes * 3 + 1, pixel_indexes * 3 + 2), axis=3)
class ElementImage(QWidget):
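    """A 22x22 tile widget: a 20x20 QImage framed by a 1 px border that
    highlights on hover and click. numpy_buffer() exposes the QImage pixels
    as a writable numpy view so crop data can be copied in without Qt calls.
    """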
def __init__(self, parent=None):
super(ElementImage, self).__init__(parent)
self.setFixedSize(22, 22)
self.image = QImage(20, 20, QImage.Format_RGB32)
self.img_arr = np.ndarray(shape=(20, 20), dtype=np.uint32, buffer=self.image.bits())
np.copyto(self.img_arr, np.zeros((20, 20), dtype=np.uint32))
self.color = QColor(100, 100, 100)
def enterEvent(self, e):
self.color = QColor(255, 150, 0)
self.repaint()
def leaveEvent(self, e):
self.color = QColor(100, 100, 100)
self.repaint()
def mousePressEvent(self, e):
self.color = QColor(255, 255, 0)
self.repaint()
def mouseReleaseEvent(self, e):
self.color = QColor(100, 100, 100)
self.repaint()
def paintEvent(self, e):
qp = QPainter()
qp.begin(self)
self.__draw_element(qp)
qp.end()
def __draw_element(self, qp):
qp.drawImage(1, 1, self.image)
qp.setPen(self.color)
qp.drawRect(0, 0, 21, 21)
def numpy_buffer(self):
return self.img_arr
class MainWindow(QWidget):
def __init__(self):
super(MainWindow, self).__init__()
self.element_images = []
self.init()
def init(self):
def generate_grid_pos(w, h):
pos = []
for row in xrange(h):
for col in xrange(w):
pos.append((row, col))
return pos
h_count = 36
w_count = 64
buffer_indexes = generate_crop_pixels_indexes_3d(1280, 720, 20, 20)
grid = QGridLayout()
count = 0
for coord in generate_grid_pos(w_count, h_count):
e_img = ElementImage()
self.element_images.append((e_img, e_img.numpy_buffer(), buffer_indexes[count]))
grid.addWidget(e_img, coord[0], coord[1])
count += 1
grid.setColumnStretch(w_count, 1)
grid.setSpacing(0)
grid.setRowStretch(h_count, 1)
grid.setVerticalSpacing(0)
self.setLayout(grid)
self.setGeometry(20, 20, 1550, 850)
self.setWindowTitle("Camera Mapping Calibration")
self.show()
self.display_image()
def display_image(self):
img = cv2.cvtColor(cv2.imread(os.path.abspath('test.png')),
cv2.COLOR_BGR2RGB)[14:, 5:1285]
#indexes = generate_indexes(1280, 720, 20, 20)
row_count = len(img)
col_count = len(img[0])
print row_count, col_count
qt_img = np.zeros((row_count, col_count), dtype=np.uint32)
#print indexes[0]
t = timeit.default_timer()
for row in xrange(row_count):
for col in xrange(col_count):
pixel = img[row][col]
qt_img[row][col] = qRgb(pixel[0],
pixel[1],
pixel[2])
#for img_element in img[row*20:(row+1)*20, col*20:(col+1)*20]:
#element_buffer = self.element_images[count][1]
#for row in xrange(row_count):
# for col in xrange(col_count):
# pixel = img_element[row][col]
# #print pixel, img[row, col]
# element_buffer[row][col] = qRgb(pixel[0], pixel[1], pixel[2])
print 'refreshing elements took...', timeit.default_timer() - t, 'seconds'
#for row in xrange(row_count):
# for col in xrange(col_count):
# pixel = img.take(indexes[0])
# print pixel.shape
#element_buffer[row][col] = qRgb(pixel[0], pixel[1], pixel[2])
"""
t = timeit.default_timer()
count = 0
for row in xrange(36):
for col in xrange(64):
crop = img[row*20:(row+1)*20, col*20:(col+1)*20]
pixels = []
for pixel_row in crop:
pixels_col = []
for pixel_col in pixel_row:
pixels_col.append(qRgb(pixel_col[0], pixel_col[1], pixel_col[2]))
pixels.append(pixels_col)
np.copyto(self.element_images[count][1], np.asarray(pixels, dtype=np.uint32))
count += 1
print 'refreshing elements took...', timeit.default_timer() - t, 'seconds'
"""
for i in xrange(len(self.element_images)):
self.element_images[i][0].repaint()
def main():
app = QApplication(sys.argv)
window = MainWindow()
sys.exit(app.exec_())
def testing():
pass
if __name__ == "__main__":
testing()
main()
| gpl-3.0 | -6,782,487,259,623,150,000 | 29.013636 | 100 | 0.54551 | false |
jsxc/xmpp-cloud-auth | xclib/tests/30_isuser_stub_test.py | 1 | 2149 | # Checks whether the isuser() function works as it should
# Stubs the cloud_request() functions for these tests
from xclib.sigcloud import sigcloud
from xclib import xcauth
from xclib.check import assertEqual
def setup_module():
global xc, sc
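    # domain_db values are tab-separated: API secret, cloud URL and,
    # optionally, a substitute upstream domain (exercised by test_xdomain).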
xc = xcauth(domain_db={
b'xdomain': b'99999\thttps://remotehost\tydomain\t',
b'udomain': b'8888\thttps://oldhost\t',
},
default_url='https://localhost', default_secret='01234')
sc = sigcloud(xc, 'user1', 'domain1')
def teardown_module():
pass
def sc_timeout(data):
assertEqual(data['operation'], 'isuser')
assertEqual(data['username'], 'user1')
assertEqual(data['domain'], 'domain1')
return (False, None, 'Timeout', None)
def test_timeout():
sc.verbose_cloud_request = sc_timeout
assertEqual(sc.isuser(), None)
def sc_404(data):
return (False, 404, None, None)
def test_http404():
sc.verbose_cloud_request = sc_404
assertEqual(sc.isuser(), None)
def sc_500json(data):
return (False, 500, {'result': 'failure'}, None)
def test_http500json():
sc.verbose_cloud_request = sc_500json
assertEqual(sc.isuser(), None)
def sc_malformed(data):
return (True, None, {'result': 'success'}, None)
def test_malformed():
sc.verbose_cloud_request = sc_malformed
assertEqual(sc.isuser(), None)
def sc_success(data):
return (True, None, {
'result': 'success',
'data': {
'isUser': '1'
}}, 'fake body')
def test_success():
sc.verbose_cloud_request = sc_success
assertEqual(sc.isuser(), True)
def sc_xdomain(data):
assertEqual(data['operation'], 'isuser')
assertEqual(data['username'], 'xuser')
assertEqual(data['domain'], 'ydomain')
return (True, None, {
'result': 'success',
'data': {
'isUser': '1'
}}, 'fake body')
def test_xdomain():
sc = sigcloud(xc, 'xuser', 'xdomain')
sc.verbose_cloud_request = sc_xdomain
assertEqual(sc.isuser(), True)
def test_domain_upgrade():
sc = sigcloud(xc, 'uuser', 'udomain')
sc.verbose_cloud_request = sc_success
assertEqual(sc.isuser(), True)
| mit | 8,073,058,404,470,537,000 | 28.438356 | 64 | 0.631922 | false |
agdsn/sipa | sipa.py | 2 | 1632 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
sipa.py
~~~~~~~~~~~~~~
This file shall be used to start the Flask app. Specific things are handled
in the `sipa` package.
"""
import argparse
import logging
from sipa import create_app
from sipa.utils import support_hotline_available
logger = logging.getLogger(__name__)
logger.info('Starting sipa...')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Sipa launcher")
parser.add_argument("--debug", action="store_true",
help="run Sipa in debug mode")
parser.add_argument("--exposed", action="store_const", const='0.0.0.0',
dest='host', help="expose Sipa on the network")
parser.add_argument("-p", "--port", action="store",
help="tcp port to use", type=int, default=5000)
args = parser.parse_args()
def preparation(app):
if args.debug:
app.debug = True
logger.warning('Running in Debug mode')
app = create_app(prepare_callable=preparation)
app.run(debug=args.debug, host=args.host, port=args.port)
else:
# __name__ == 'uwsgi_file_sipa'
import uwsgi
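    # uwsgi.opt exposes the options from the uwsgi configuration; a 'debug'
    # entry there switches on werkzeug's interactive debugger below.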
debug = uwsgi.opt.get('debug', False)
app = create_app()
if debug:
logger.warning("Running in debug mode")
app.debug = True
from werkzeug.debug import DebuggedApplication
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
# app will now be used by `uwsgi`
@app.context_processor
def inject_hotline_status():
return dict(support_hotline_available=support_hotline_available())
| mit | -3,576,380,963,574,401,500 | 29.222222 | 79 | 0.626225 | false |
EliotBerriot/django | tests/postgres_tests/test_array.py | 89 | 19906 | import decimal
import json
import unittest
import uuid
from django import forms
from django.core import exceptions, serializers, validators
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.test import TransactionTestCase, override_settings
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import (
ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel,
NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel,
PostgreSQLModel,
)
try:
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.forms import SimpleArrayField, SplitArrayField
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=['hello', 'goodbye'])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=['1'])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertEqual(loaded.field, None)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field('field')
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
class TestQuerying(PostgreSQLTestCase):
def setUp(self):
self.objs = [
NullableIntegerArrayModel.objects.create(field=[1]),
NullableIntegerArrayModel.objects.create(field=[2]),
NullableIntegerArrayModel.objects.create(field=[2, 3]),
NullableIntegerArrayModel.objects.create(field=[20, 30, 40]),
NullableIntegerArrayModel.objects.create(field=None),
]
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]),
self.objs[:1]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True),
self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]),
self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]),
self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2]
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2]
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=['text']),
[]
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=['text']),
[]
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=['text']),
[]
)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2),
self.objs[1:3]
)
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3),
self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1),
[instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
[instance]
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3]
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2),
self.objs[0:3]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
self.objs[2:3]
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
[instance]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]]
)
class TestChecks(PostgreSQLTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField())
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, 'postgres.E001')
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, 'postgres.E002')
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
available_apps = ['postgres_tests']
def test_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(type(new.base_field), type(field.base_field))
def test_deconstruct_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.size, field.size)
def test_deconstruct_args(self):
field = ArrayField(models.CharField(max_length=20))
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.base_field.max_length, field.base_field.max_length)
def test_subclass_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')
field = ArrayFieldSubclass()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_default_migrations",
})
def test_adding_field_with_default(self):
# See #22962
table_name = 'postgres_tests_integerarraydefaultmodel'
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
self.assertIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_index_migrations",
})
def test_adding_arrayfield_with_index(self):
"""
ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
"""
table_name = 'postgres_tests_chartextarrayindexmodel'
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
like_constraint_field_names = [
c.rsplit('_', 2)[0][len(table_name) + 1:]
for c in connection.introspection.get_constraints(cursor, table_name)
if c.endswith('_like')
]
# Only the CharField should have a LIKE index.
self.assertEqual(like_constraint_field_names, ['char2'])
with connection.cursor() as cursor:
indexes = connection.introspection.get_indexes(cursor, table_name)
# All fields should have regular indexes.
self.assertIn('char', indexes)
self.assertIn('char2', indexes)
self.assertIn('text', indexes)
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLTestCase):
test_data = (
'[{"fields": {"field": "[\\"1\\", \\"2\\"]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
)
def test_dumping(self):
instance = IntegerArrayModel(field=[1, 2])
data = serializers.serialize('json', [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, [1, 2])
class TestValidation(PostgreSQLTestCase):
def test_unbounded(self):
field = ArrayField(models.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, None], None)
self.assertEqual(cm.exception.code, 'item_invalid')
self.assertEqual(
cm.exception.message % cm.exception.params,
'Item 1 in the array did not validate: This field cannot be null.'
)
def test_blank_true(self):
field = ArrayField(models.IntegerField(blank=True, null=True))
# This should not raise a validation error
field.clean([1, None], None)
def test_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
field.clean([1, 2, 3], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, 2, 3, 4], None)
self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')
def test_nested_array_mismatch(self):
field = ArrayField(ArrayField(models.IntegerField()))
field.clean([[1, 2], [3, 4]], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([[1, 2], [3, 4, 5]], None)
self.assertEqual(cm.exception.code, 'nested_array_mismatch')
self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')
def test_with_validators(self):
field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
field.clean([1, 2], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([0], None)
self.assertEqual(cm.exception.code, 'item_invalid')
self.assertEqual(
cm.exception.messages[0],
'Item 0 in the array did not validate: Ensure this value is greater than or equal to 1.'
)
class TestSimpleFormField(PostgreSQLTestCase):
def test_valid(self):
field = SimpleArrayField(forms.CharField())
value = field.clean('a,b,c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_to_python_fail(self):
field = SimpleArrayField(forms.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,9')
self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a whole number.')
def test_validate_fail(self):
field = SimpleArrayField(forms.CharField(required=True))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,')
self.assertEqual(cm.exception.messages[0], 'Item 2 in the array did not validate: This field is required.')
def test_validators_fail(self):
field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,bc,de')
self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a valid value.')
def test_delimiter(self):
field = SimpleArrayField(forms.CharField(), delimiter='|')
value = field.clean('a|b|c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_delimiter_with_nesting(self):
field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
value = field.clean('a,b|c,d')
self.assertEqual(value, [['a', 'b'], ['c', 'd']])
def test_prepare_value(self):
field = SimpleArrayField(forms.CharField())
value = field.prepare_value(['a', 'b', 'c'])
self.assertEqual(value, 'a,b,c')
def test_max_length(self):
field = SimpleArrayField(forms.CharField(), max_length=2)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')
def test_min_length(self):
field = SimpleArrayField(forms.CharField(), min_length=4)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')
def test_required(self):
field = SimpleArrayField(forms.CharField(), required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('')
self.assertEqual(cm.exception.messages[0], 'This field is required.')
def test_model_field_formfield(self):
model_field = ArrayField(models.CharField(max_length=27))
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertIsInstance(form_field.base_field, forms.CharField)
self.assertEqual(form_field.base_field.max_length, 27)
def test_model_field_formfield_size(self):
model_field = ArrayField(models.CharField(max_length=27), size=4)
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertEqual(form_field.max_length, 4)
class TestSplitFormField(PostgreSQLTestCase):
def test_valid(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})
def test_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), required=True, size=3)
data = {'array_0': '', 'array_1': '', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['This field is required.']})
def test_remove_trailing_nulls(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)
data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid(), form.errors)
self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})
def test_required_field(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['Item 2 in the array did not validate: This field is required.']})
def test_rendering(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
self.assertHTMLEqual(str(SplitForm()), '''
<tr>
<th><label for="id_array_0">Array:</label></th>
<td>
<input id="id_array_0" name="array_0" type="text" />
<input id="id_array_1" name="array_1" type="text" />
<input id="id_array_2" name="array_2" type="text" />
</td>
</tr>
''')
| bsd-3-clause | -3,580,162,065,559,395,300 | 36.91619 | 115 | 0.631719 | false |
MatthieuBizien/scikit-learn | sklearn/model_selection/__init__.py | 53 | 1587 | from ._split import BaseCrossValidator
from ._split import KFold
from ._split import LabelKFold
from ._split import StratifiedKFold
from ._split import LeaveOneLabelOut
from ._split import LeaveOneOut
from ._split import LeavePLabelOut
from ._split import LeavePOut
from ._split import ShuffleSplit
from ._split import LabelShuffleSplit
from ._split import StratifiedShuffleSplit
from ._split import PredefinedSplit
from ._split import train_test_split
from ._split import check_cv
from ._validation import cross_val_score
from ._validation import cross_val_predict
from ._validation import learning_curve
from ._validation import permutation_test_score
from ._validation import validation_curve
from ._search import GridSearchCV
from ._search import RandomizedSearchCV
from ._search import ParameterGrid
from ._search import ParameterSampler
from ._search import fit_grid_point
__all__ = ('BaseCrossValidator',
'GridSearchCV',
'KFold',
'LabelKFold',
'LabelShuffleSplit',
'LeaveOneLabelOut',
'LeaveOneOut',
'LeavePLabelOut',
'LeavePOut',
'ParameterGrid',
'ParameterSampler',
'PredefinedSplit',
'RandomizedSearchCV',
'ShuffleSplit',
'StratifiedKFold',
'StratifiedShuffleSplit',
'check_cv',
'cross_val_predict',
'cross_val_score',
'fit_grid_point',
'learning_curve',
'permutation_test_score',
'train_test_split',
'validation_curve')
| bsd-3-clause | -3,220,648,532,765,314,000 | 30.117647 | 47 | 0.666037 | false |
Ichag/odoo | openerp/tools/amount_to_text.py | 393 | 7719 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
to_19_fr = ( u'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ( '',
'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
""" convert a value < 100 to French
"""
if val < 20:
return to_19_fr[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_fr[val % 10]
return dcap
def _convert_nnn_fr(val):
""" convert a value < 1000 to french
special cased because it is the level that kicks
    off the < 100 special case. The rest are more general. Note that the
    hundreds digit is looked up directly in to_19_fr, which is safe because
    rem is always < 10 for val < 1000.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_fr[rem] + ' Cent'
if mod > 0:
word += ' '
if mod > 0:
word += _convert_nn_fr(mod)
return word
def french_number(val):
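    """Spell out a non-negative integer in French, recursing over groups of
    thousands named by denom_fr."""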
if val < 100:
return _convert_nn_fr(val)
if val < 1000:
return _convert_nnn_fr(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
if r > 0:
ret = ret + ', ' + french_number(r)
return ret
def amount_to_text_fr(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = french_number(abs(int(list[0])))
end_word = french_number(int(list[1]))
cents_number = int(list[1])
    cents_name = (cents_number > 1) and 'Cents' or 'Cent'
    final_result = start_word + ' ' + units_name + ' ' + end_word + ' ' + cents_name
return final_result
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ( '',
'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
""" convert a value < 100 to Dutch
"""
if val < 20:
return to_19_nl[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_nl[val % 10]
return dcap
def _convert_nnn_nl(val):
""" convert a value < 1000 to Dutch
special cased because it is the level that kicks
    off the < 100 special case. The rest are more general. Note that the
    hundreds digit is looked up directly in to_19_nl, which is safe because
    rem is always < 10 for val < 1000.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_nl[rem] + ' Honderd'
if mod > 0:
word += ' '
if mod > 0:
word += _convert_nn_nl(mod)
return word
def dutch_number(val):
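    """Spell out a non-negative integer in Dutch, mirroring french_number
    with the Dutch word tables."""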
if val < 100:
return _convert_nn_nl(val)
if val < 1000:
return _convert_nnn_nl(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
if r > 0:
ret = ret + ', ' + dutch_number(r)
return ret
def amount_to_text_nl(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = dutch_number(int(list[0]))
end_word = dutch_number(int(list[1]))
    # Dutch uses 'cent' for both singular and plural, so no conditional here.
    cents_name = 'cent'
    final_result = start_word + ' ' + units_name + ' ' + end_word + ' ' + cents_name
return final_result
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
def add_amount_to_text_function(lang, func):
_translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: mille six cent cinquante-quatre.
"""
# if nbr > 1000000:
##TODO: use logger
# print "WARNING: number too large '%d', can't translate it!" % (nbr,)
# return str(nbr)
    if lang not in _translate_funcs:
#TODO: use logger
print "WARNING: no translation function found for lang: '%s'" % (lang,)
#TODO: (default should be en) same as above
lang = 'fr'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,217,535,880,384,793,600 | 37.014778 | 121 | 0.52637 | false |
jonathanslenders/pyvim | pyvim/welcome_message.py | 1 | 1246 | """
The welcome message. This is displayed when the editor opens without any files.
"""
from __future__ import unicode_literals
from prompt_toolkit.formatted_text.utils import fragment_list_len
import prompt_toolkit
import pyvim
import platform
import sys
version = sys.version_info
pyvim_version = pyvim.__version__
__all__ = (
'WELCOME_MESSAGE_TOKENS',
'WELCOME_MESSAGE_WIDTH',
'WELCOME_MESSAGE_HEIGHT',
)
WELCOME_MESSAGE_WIDTH = 36
WELCOME_MESSAGE_TOKENS = [
('class:title', 'PyVim - Pure Python Vi clone\n'),
('', 'Still experimental\n\n'),
('', 'version '), ('class:version', pyvim_version),
('', ', prompt_toolkit '), ('class:version', prompt_toolkit.__version__),
('', '\n'),
('', 'by Jonathan Slenders\n\n'),
('', 'type :q'),
('class:key', '<Enter>'),
('', ' to exit\n'),
('', 'type :help'),
('class:key', '<Enter>'),
('', ' or '),
('class:key', '<F1>'),
('', ' for help\n\n'),
('', 'All feedback is appreciated.\n\n'),
('class:pythonversion', ' %s %i.%i.%i ' % (
platform.python_implementation(),
version[0], version[1], version[2])),
]
WELCOME_MESSAGE_HEIGHT = ''.join(t[1] for t in WELCOME_MESSAGE_TOKENS).count('\n') + 1
| bsd-3-clause | 5,135,563,662,758,781,000 | 27.318182 | 86 | 0.58427 | false |
channing/gyp | test/ninja/action_dependencies/gyptest-action-dependencies.py | 246 | 1850 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that building an object file correctly depends on running actions in
dependent targets, but not the targets themselves.
"""
import os
import sys
import TestGyp
# NOTE(piman): This test will not work with other generators because:
# - it explicitly tests the optimization, which is not implemented (yet?) on
# other generators
# - it relies on the exact path to output object files, which is generator
# dependent, and actually, relies on the ability to build only that object file,
# which I don't think is available on all generators.
# TODO(piman): Extend to other generators when possible.
test = TestGyp.TestGyp(formats=['ninja'])
test.run_gyp('action_dependencies.gyp', chdir='src')
chdir = 'relocate/src'
test.relocate('src', chdir)
objext = '.obj' if sys.platform == 'win32' else '.o'
test.build('action_dependencies.gyp',
os.path.join('obj', 'b.b' + objext),
chdir=chdir)
# The 'a' actions should be run (letting b.c compile), but the a static library
# should not be built.
test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist(os.path.join('obj', 'b.b' + objext), chdir=chdir)
test.build('action_dependencies.gyp',
os.path.join('obj', 'c.c' + objext),
chdir=chdir)
# 'a' and 'b' should be built, so that the 'c' action succeeds, letting c.c
# compile
test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist('b', type=test.EXECUTABLE, chdir=chdir)
test.built_file_must_exist(os.path.join('obj', 'c.c' + objext), chdir=chdir)
test.pass_test()
| bsd-3-clause | 4,075,630,267,932,572,700 | 33.90566 | 80 | 0.711351 | false |
kumar303/olympia | src/olympia/devhub/tests/test_forms.py | 2 | 35137 | # -*- coding: utf-8 -*-
import os
import shutil
import tempfile
from datetime import timedelta
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils import translation
import pytest
import six
from freezegun import freeze_time
from unittest import mock
from waffle.testutils import override_switch
from olympia import amo, core
from olympia.addons.models import Addon, Category
from olympia.amo.tests import (
addon_factory, get_random_ip, req_factory_factory, TestCase, user_factory)
from olympia.amo.tests.test_helpers import get_image_path
from olympia.amo.utils import rm_local_tmp_dir
from olympia.applications.models import AppVersion
from olympia.devhub import forms
from olympia.files.models import FileUpload
from olympia.signing.views import VersionView
from olympia.tags.models import AddonTag, Tag
class TestNewUploadForm(TestCase):
def test_firefox_default_selected(self):
upload = FileUpload.objects.create(valid=False)
data = {'upload': upload.uuid}
request = req_factory_factory('/', post=True, data=data)
request.user = user_factory()
form = forms.NewUploadForm(data, request=request)
assert form.fields['compatible_apps'].initial == [amo.FIREFOX.id]
def test_compat_apps_widget_custom_label_class_rendered(self):
"""We are setting a custom class at the label
of the compatibility apps multi-select to correctly render
images.
"""
upload = FileUpload.objects.create(valid=False)
data = {'upload': upload.uuid}
request = req_factory_factory('/', post=True, data=data)
request.user = user_factory()
form = forms.NewUploadForm(data, request=request)
result = form.fields['compatible_apps'].widget.render(
name='compatible_apps', value=amo.FIREFOX.id)
assert 'class="app firefox"' in result
result = form.fields['compatible_apps'].widget.render(
name='compatible_apps', value=amo.ANDROID.id)
assert 'class="app android"' in result
def test_only_valid_uploads(self):
upload = FileUpload.objects.create(valid=False)
data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
request = req_factory_factory('/', post=True, data=data)
request.user = user_factory()
form = forms.NewUploadForm(data, request=request)
assert ('There was an error with your upload. Please try again.' in
form.errors.get('__all__')), form.errors
# Admin override makes the form ignore the brokenness
with mock.patch('olympia.access.acl.action_allowed_user') as acl:
# For the 'Addons:Edit' permission check.
acl.return_value = True
data['admin_override_validation'] = True
form = forms.NewUploadForm(data, request=request)
assert ('There was an error with your upload. Please try' not in
form.errors.get('__all__')), form.errors
upload.validation = '{"errors": 0}'
upload.save()
addon = Addon.objects.create()
data.pop('admin_override_validation')
form = forms.NewUploadForm(data, request=request, addon=addon)
assert ('There was an error with your upload. Please try again.' not in
form.errors.get('__all__')), form.errors
@mock.patch('olympia.devhub.forms.parse_addon')
def test_throttling(self, parse_addon_mock):
upload = FileUpload.objects.create(valid=True, name='foo.xpi')
data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
request = req_factory_factory('/', post=True, data=data)
request.user = user_factory()
request.META['REMOTE_ADDR'] = '5.6.7.8'
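        # Six prior submissions should trip the throttle; advancing the
        # frozen clock past the one-minute window lets the next attempt pass.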
with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
for x in range(0, 6):
self._add_fake_throttling_action(
view_class=VersionView,
url='/',
user=request.user,
remote_addr=get_random_ip(),
)
form = forms.NewUploadForm(data, request=request)
assert not form.is_valid()
assert form.errors.get('__all__') == [
'You have submitted too many uploads recently. '
'Please try again after some time.'
]
frozen_time.tick(delta=timedelta(seconds=61))
form = forms.NewUploadForm(data, request=request)
assert form.is_valid()
# Those three patches are so files.utils.parse_addon doesn't fail on a
# non-existent file even before having a chance to call check_xpi_info.
@mock.patch('olympia.files.utils.Extractor.parse')
@mock.patch('olympia.files.utils.extract_xpi', lambda xpi, path: None)
@mock.patch('olympia.files.utils.get_file', lambda xpi: None)
# This is the one we want to test.
@mock.patch('olympia.files.utils.check_xpi_info')
def test_check_xpi_called(self, mock_check_xpi_info, mock_parse):
"""Make sure the check_xpi_info helper is called.
        There are some important checks made in check_xpi_info; if we ever
        refactor the form to not call it anymore, we need to make sure those
        checks still run at some point.
"""
mock_parse.return_value = None
mock_check_xpi_info.return_value = {'name': 'foo', 'type': 2}
upload = FileUpload.objects.create(valid=True, name='foo.xpi')
addon = Addon.objects.create()
data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
request = req_factory_factory('/', post=True, data=data)
request.user = user_factory()
form = forms.NewUploadForm(data, addon=addon, request=request)
form.clean()
assert mock_check_xpi_info.called
class TestCompatForm(TestCase):
fixtures = ['base/addon_3615']
def setUp(self):
super(TestCompatForm, self).setUp()
AppVersion.objects.create(
application=amo.ANDROID.id, version='50.0')
AppVersion.objects.create(
application=amo.ANDROID.id, version='56.0')
AppVersion.objects.create(
application=amo.FIREFOX.id, version='56.0')
AppVersion.objects.create(
application=amo.FIREFOX.id, version='56.*')
AppVersion.objects.create(
application=amo.FIREFOX.id, version='57.0')
AppVersion.objects.create(
application=amo.FIREFOX.id, version='57.*')
def test_forms(self):
version = Addon.objects.get(id=3615).current_version
formset = forms.CompatFormSet(None, queryset=version.apps.all(),
form_kwargs={'version': version})
apps = [form.app for form in formset.forms]
assert set(apps) == set(amo.APP_USAGE)
def test_form_initial(self):
version = Addon.objects.get(id=3615).current_version
current_min = version.apps.filter(application=amo.FIREFOX.id).get().min
current_max = version.apps.filter(application=amo.FIREFOX.id).get().max
formset = forms.CompatFormSet(None, queryset=version.apps.all(),
form_kwargs={'version': version})
form = formset.forms[0]
assert form.app == amo.FIREFOX
assert form.initial['application'] == amo.FIREFOX.id
assert form.initial['min'] == current_min.pk
assert form.initial['max'] == current_max.pk
def _test_form_choices_expect_all_versions(self, version):
expected_min_choices = [(u'', u'---------')] + list(
AppVersion.objects.filter(application=amo.FIREFOX.id)
.exclude(version__contains='*')
.values_list('pk', 'version')
.order_by('version_int'))
expected_max_choices = [(u'', u'---------')] + list(
AppVersion.objects.filter(application=amo.FIREFOX.id)
.values_list('pk', 'version')
.order_by('version_int'))
formset = forms.CompatFormSet(None, queryset=version.apps.all(),
form_kwargs={'version': version})
form = formset.forms[0]
assert form.app == amo.FIREFOX
assert list(form.fields['min'].choices) == expected_min_choices
assert list(form.fields['max'].choices) == expected_max_choices
def test_form_choices(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(is_webextension=True)
del version.all_files
self._test_form_choices_expect_all_versions(version)
def test_form_choices_no_compat(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(is_webextension=False)
version.addon.update(type=amo.ADDON_DICT)
del version.all_files
self._test_form_choices_expect_all_versions(version)
def test_form_choices_language_pack(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(is_webextension=False)
version.addon.update(type=amo.ADDON_LPAPP)
del version.all_files
self._test_form_choices_expect_all_versions(version)
def test_form_choices_legacy(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(is_webextension=False)
del version.all_files
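        # Legacy (non-WebExtension) add-ons cannot target Firefox 57 or
        # later, so the 57.0/57.* AppVersions are expected to be excluded
        # from the choices below.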
firefox_57 = AppVersion.objects.get(
application=amo.FIREFOX.id, version='57.0')
firefox_57_s = AppVersion.objects.get(
application=amo.FIREFOX.id, version='57.*')
expected_min_choices = [(u'', u'---------')] + list(
AppVersion.objects.filter(application=amo.FIREFOX.id)
.exclude(version__contains='*')
.exclude(pk__in=(firefox_57.pk, firefox_57_s.pk))
.values_list('pk', 'version')
.order_by('version_int'))
expected_max_choices = [(u'', u'---------')] + list(
AppVersion.objects.filter(application=amo.FIREFOX.id)
.exclude(pk__in=(firefox_57.pk, firefox_57_s.pk))
.values_list('pk', 'version')
.order_by('version_int'))
formset = forms.CompatFormSet(None, queryset=version.apps.all(),
form_kwargs={'version': version})
form = formset.forms[0]
assert form.app == amo.FIREFOX
assert list(form.fields['min'].choices) == expected_min_choices
assert list(form.fields['max'].choices) == expected_max_choices
expected_an_choices = [(u'', u'---------')] + list(
AppVersion.objects.filter(application=amo.ANDROID.id)
.values_list('pk', 'version').order_by('version_int'))
form = formset.forms[1]
assert form.app == amo.ANDROID
assert list(form.fields['min'].choices) == expected_an_choices
assert list(form.fields['max'].choices) == expected_an_choices
def test_form_choices_mozilla_signed_legacy(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(
is_webextension=False,
is_mozilla_signed_extension=True)
del version.all_files
self._test_form_choices_expect_all_versions(version)
def test_static_theme(self):
version = Addon.objects.get(id=3615).current_version
version.files.all().update(is_webextension=True)
version.addon.update(type=amo.ADDON_STATICTHEME)
del version.all_files
self._test_form_choices_expect_all_versions(version)
formset = forms.CompatFormSet(None, queryset=version.apps.all(),
form_kwargs={'version': version})
        assert formset.can_delete is False  # No deleting the Firefox app.
        assert formset.extra == 0  # And don't let extra apps be added.
class TestPreviewForm(TestCase):
fixtures = ['base/addon_3615']
def setUp(self):
super(TestPreviewForm, self).setUp()
self.dest = os.path.join(settings.TMP_PATH, 'preview')
if not os.path.exists(self.dest):
os.makedirs(self.dest)
@mock.patch('olympia.amo.models.ModelBase.update')
def test_preview_modified(self, update_mock):
addon = Addon.objects.get(pk=3615)
name = 'transparent.png'
form = forms.PreviewForm({'caption': 'test', 'upload_hash': name,
'position': 1})
with storage.open(os.path.join(self.dest, name), 'wb') as f:
shutil.copyfileobj(open(get_image_path(name), 'rb'), f)
assert form.is_valid()
form.save(addon)
assert update_mock.called
@mock.patch('olympia.amo.utils.pngcrush_image')
def test_preview_size(self, pngcrush_image_mock):
addon = Addon.objects.get(pk=3615)
name = 'teamaddons.jpg'
form = forms.PreviewForm({'caption': 'test', 'upload_hash': name,
'position': 1})
with storage.open(os.path.join(self.dest, name), 'wb') as f:
shutil.copyfileobj(open(get_image_path(name), 'rb'), f)
assert form.is_valid()
form.save(addon)
preview = addon.previews.all()[0]
assert preview.sizes == (
{u'image': [2400, 1600], u'thumbnail': [640, 427],
u'original': [3000, 2000]})
assert os.path.exists(preview.image_path)
assert os.path.exists(preview.thumbnail_path)
assert os.path.exists(preview.original_path)
assert pngcrush_image_mock.call_count == 2
assert pngcrush_image_mock.call_args_list[0][0][0] == (
preview.thumbnail_path)
assert pngcrush_image_mock.call_args_list[1][0][0] == (
preview.image_path)
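        # i.e. pngcrush is run on the generated thumbnail and the resized
        # image, but not on the stored original.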
class TestDistributionChoiceForm(TestCase):
@pytest.mark.needs_locales_compilation
def test_lazy_choice_labels(self):
"""Tests that the labels in `choices` are still lazy
        We had a problem where the labels weren't properly marked as lazy,
        which led to labels being returned in mixed languages depending on
        which server we hit in production.
"""
with translation.override('en-US'):
form = forms.DistributionChoiceForm()
label = form.fields['channel'].choices[0][1]
expected = 'On this site.'
label = six.text_type(label)
assert label.startswith(expected)
with translation.override('de'):
form = forms.DistributionChoiceForm()
label = form.fields['channel'].choices[0][1]
expected = 'Auf dieser Website.'
label = six.text_type(label)
assert label.startswith(expected)
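        # Casting with six.text_type is what forces the lazy proxy to
        # evaluate under the translation.override() active at that moment,
        # which is why each block above sees its own language.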
class TestDescribeForm(TestCase):
fixtures = ('base/addon_3615', 'base/addon_3615_categories',
'addons/denied')
def setUp(self):
super(TestDescribeForm, self).setUp()
self.existing_name = 'Delicious Bookmarks'
self.non_existing_name = 'Does Not Exist'
self.error_msg = 'This name is already in use. Please choose another.'
self.request = req_factory_factory('/')
def test_slug_deny(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'slug': u'submit'}, request=self.request, instance=delicious)
assert not form.is_valid()
assert form.errors['slug'] == (
[u'The slug cannot be "submit". Please choose another.'])
def test_name_trademark_mozilla(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'name': u'Delicious Mozilla', 'summary': u'foô', 'slug': u'bar'},
request=self.request,
instance=delicious)
assert not form.is_valid()
assert form.errors['name'].data[0].message.startswith(
u'Add-on names cannot contain the Mozilla or Firefox trademarks.')
def test_name_trademark_firefox(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'name': u'Delicious Firefox', 'summary': u'foö', 'slug': u'bar'},
request=self.request,
instance=delicious)
assert not form.is_valid()
assert form.errors['name'].data[0].message.startswith(
u'Add-on names cannot contain the Mozilla or Firefox trademarks.')
@override_switch('content-optimization', active=False)
def test_name_trademark_allowed_for_prefix(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'name': u'Delicious for Mozilla', 'summary': u'foø',
'slug': u'bar'},
request=self.request,
instance=delicious)
assert form.is_valid()
def test_name_no_trademark(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'name': u'Delicious Dumdidum', 'summary': u'đoo', 'slug': u'bar'},
request=self.request,
instance=delicious)
assert form.is_valid()
def test_slug_isdigit(self):
delicious = Addon.objects.get()
form = forms.DescribeForm(
{'slug': u'123'}, request=self.request, instance=delicious)
assert not form.is_valid()
assert form.errors['slug'] == (
[u'The slug cannot be "123". Please choose another.'])
def test_bogus_support_url(self):
form = forms.DescribeForm(
{'support_url': 'javascript://something.com'},
request=self.request, instance=Addon.objects.get())
assert not form.is_valid()
assert form.errors['support_url'] == [u'Enter a valid URL.']
def test_ftp_support_url(self):
form = forms.DescribeForm(
{'support_url': 'ftp://foo.com'}, request=self.request,
instance=Addon.objects.get())
assert not form.is_valid()
assert form.errors['support_url'] == [u'Enter a valid URL.']
def test_http_support_url(self):
form = forms.DescribeForm(
{'name': u'Delicious Dumdidum', 'summary': u'foo', 'slug': u'bar',
'support_url': 'http://foo.com'},
request=self.request, instance=Addon.objects.get())
assert form.is_valid(), form.errors
def test_description_optional(self):
delicious = Addon.objects.get()
assert delicious.type == amo.ADDON_EXTENSION
with override_switch('content-optimization', active=False):
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar'},
request=self.request, instance=delicious)
assert form.is_valid(), form.errors
with override_switch('content-optimization', active=True):
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar'},
request=self.request, instance=delicious)
assert not form.is_valid()
# But only extensions are required to have a description
delicious.update(type=amo.ADDON_STATICTHEME)
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar'},
request=self.request, instance=delicious)
assert form.is_valid(), form.errors
# Do it again, but this time with a description
delicious.update(type=amo.ADDON_EXTENSION)
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar', 'description': u'its a description'},
request=self.request,
instance=delicious)
assert form.is_valid(), form.errors
def test_description_min_length(self):
delicious = Addon.objects.get()
assert delicious.type == amo.ADDON_EXTENSION
with override_switch('content-optimization', active=False):
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar', 'description': u'123456789'},
request=self.request, instance=delicious)
assert form.is_valid(), form.errors
with override_switch('content-optimization', active=True):
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar', 'description': u'123456789'},
request=self.request, instance=delicious)
assert not form.is_valid()
# But only extensions have a minimum length
delicious.update(type=amo.ADDON_STATICTHEME)
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar', 'description': u'123456789'},
request=self.request, instance=delicious)
assert form.is_valid()
# Do it again, but this time with a longer description
delicious.update(type=amo.ADDON_EXTENSION)
form = forms.DescribeForm(
{'name': u'Delicious for everyone', 'summary': u'foo',
'slug': u'bar', 'description': u'1234567890'},
request=self.request,
instance=delicious)
assert form.is_valid(), form.errors
def test_name_summary_lengths(self):
delicious = Addon.objects.get()
short_data = {
'name': u'n', 'summary': u's', 'slug': u'bar',
'description': u'1234567890'}
over_70_data = {
'name': u'this is a name that hits the 50 char limit almost',
'summary': u'this is a summary that doesn`t get close to the '
u'existing 250 limit but is over 70',
'slug': u'bar', 'description': u'1234567890'}
under_70_data = {
'name': u'this is a name that is over the 50 char limit by a few',
'summary': u'ab',
'slug': u'bar', 'description': u'1234567890'}
# short name and summary - both allowed with DescribeForm
form = forms.DescribeForm(
short_data, request=self.request, instance=delicious)
assert form.is_valid()
# but not with DescribeFormContentOptimization
form = forms.DescribeFormContentOptimization(
short_data, request=self.request, instance=delicious)
assert not form.is_valid()
assert form.errors['name'] == [
u'Ensure this value has at least 2 characters (it has 1).']
assert form.errors['summary'] == [
u'Ensure this value has at least 2 characters (it has 1).']
# As are long names and summaries
form = forms.DescribeForm(
over_70_data, request=self.request, instance=delicious)
assert form.is_valid()
# but together are over 70 chars so no longer allowed
form = forms.DescribeFormContentOptimization(
over_70_data, request=self.request, instance=delicious)
assert not form.is_valid()
assert len(over_70_data['name']) + len(over_70_data['summary']) == 130
assert form.errors['name'] == [
u'Ensure name and summary combined are at most 70 characters '
u'(they have 130).']
assert 'summary' not in form.errors
# DescribeForm has a lower limit for name length
form = forms.DescribeForm(
under_70_data, request=self.request, instance=delicious)
assert not form.is_valid()
assert form.errors['name'] == [
u'Ensure this value has at most 50 characters (it has 54).']
# DescribeFormContentOptimization only cares that the total is <= 70
form = forms.DescribeFormContentOptimization(
under_70_data, request=self.request, instance=delicious)
assert form.is_valid()
assert len(under_70_data['name']) + len(under_70_data['summary']) == 56
def test_name_summary_auto_cropping(self):
delicious = Addon.objects.get()
assert delicious.default_locale == 'en-US'
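        # The auto-crop behaviour exercised below keeps each locale's
        # name + summary within the combined 70-character budget, trimming
        # whichever field overflows rather than raising a validation error.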
summary_needs_cropping = {
'name_en-us': u'a' * 25,
'name_fr': u'b' * 30,
'summary_en-us': u'c' * 45,
'summary_fr': u'd' * 45, # 30 + 45 is > 70
'slug': u'slug',
'description_en-us': u'z' * 10,
}
form = forms.DescribeFormContentOptimization(
summary_needs_cropping, request=self.request, instance=delicious,
should_auto_crop=True)
assert form.is_valid(), form.errors
assert form.cleaned_data['name']['en-us'] == u'a' * 25 # no change
assert form.cleaned_data['summary']['en-us'] == u'c' * 45 # no change
assert form.cleaned_data['name']['fr'] == u'b' * 30 # no change
assert form.cleaned_data['summary']['fr'] == u'd' * 40 # 45 to 40
summary_needs_cropping_no_name = {
'name_en-us': u'a' * 25,
'summary_en-us': u'c' * 45,
'summary_fr': u'd' * 50,
'slug': u'slug',
'description_en-us': u'z' * 10,
}
form = forms.DescribeFormContentOptimization(
summary_needs_cropping_no_name, request=self.request,
instance=delicious, should_auto_crop=True)
assert form.is_valid(), form.errors
assert form.cleaned_data['name']['en-us'] == u'a' * 25
assert form.cleaned_data['summary']['en-us'] == u'c' * 45
assert 'fr' not in form.cleaned_data['name'] # we've not added it
assert form.cleaned_data['summary']['fr'] == u'd' * 45 # 50 to 45
name_needs_cropping = {
'name_en-us': u'a' * 67,
'name_fr': u'b' * 69,
'summary_en-us': u'c' * 2,
'summary_fr': u'd' * 3,
'slug': u'slug',
'description_en-us': u'z' * 10,
}
form = forms.DescribeFormContentOptimization(
name_needs_cropping, request=self.request,
instance=delicious, should_auto_crop=True)
assert form.is_valid(), form.errors
assert form.cleaned_data['name']['en-us'] == u'a' * 67 # no change
assert form.cleaned_data['summary']['en-us'] == u'c' * 2 # no change
assert form.cleaned_data['name']['fr'] == u'b' * 68 # 69 to 68
assert form.cleaned_data['summary']['fr'] == u'd' * 2 # 3 to 2
name_needs_cropping_no_summary = {
'name_en-us': u'a' * 50,
'name_fr': u'b' * 69,
'summary_en-us': u'c' * 20,
'slug': u'slug',
'description_en-us': u'z' * 10,
}
form = forms.DescribeFormContentOptimization(
name_needs_cropping_no_summary, request=self.request,
instance=delicious, should_auto_crop=True)
assert form.is_valid(), form.errors
assert form.cleaned_data['name']['en-us'] == u'a' * 50 # no change
assert form.cleaned_data['summary']['en-us'] == u'c' * 20 # no change
assert form.cleaned_data['name']['fr'] == u'b' * 50 # 69 to 50
assert 'fr' not in form.cleaned_data['summary']
class TestAdditionalDetailsForm(TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
super(TestAdditionalDetailsForm, self).setUp()
self.addon = Addon.objects.get(pk=3615)
self.data = {
'default_locale': 'en-US',
'homepage': str(self.addon.homepage),
}
self.user = self.addon.authors.all()[0]
core.set_user(self.user)
self.request = req_factory_factory('/')
def test_locales(self):
form = forms.AdditionalDetailsForm(
request=self.request, instance=self.addon)
assert form.fields['default_locale'].choices[0][0] == 'af'
def add_tags(self, tags):
data = self.data.copy()
data.update({'tags': tags})
form = forms.AdditionalDetailsForm(
data=data, request=self.request, instance=self.addon)
assert form.is_valid()
form.save(self.addon)
return form
def get_tag_text(self):
return [t.tag_text for t in self.addon.tags.all()]
def test_tags(self):
self.add_tags('foo, bar')
assert self.get_tag_text() == ['bar', 'foo']
def test_tags_xss(self):
self.add_tags('<script>alert("foo")</script>, bar')
assert self.get_tag_text() == ['bar', 'scriptalertfooscript']
def test_tags_case_spaces(self):
self.add_tags('foo, bar')
self.add_tags('foo, bar , Bar, BAR, b a r ')
assert self.get_tag_text() == ['b a r', 'bar', 'foo']
def test_tags_spaces(self):
self.add_tags('foo, bar beer')
assert self.get_tag_text() == ['bar beer', 'foo']
def test_tags_unicode(self):
self.add_tags(u'Österreich')
assert self.get_tag_text() == [u'Österreich'.lower()]
def add_restricted(self, *args):
if not args:
args = ['i_am_a_restricted_tag']
for arg in args:
tag = Tag.objects.create(tag_text=arg, restricted=True)
AddonTag.objects.create(tag=tag, addon=self.addon)
def test_tags_restricted(self):
self.add_restricted()
self.add_tags('foo, bar')
form = forms.AdditionalDetailsForm(
data=self.data, request=self.request, instance=self.addon)
assert form.fields['tags'].initial == 'bar, foo'
assert self.get_tag_text() == ['bar', 'foo', 'i_am_a_restricted_tag']
self.add_tags('')
assert self.get_tag_text() == ['i_am_a_restricted_tag']
def test_tags_error(self):
self.add_restricted('i_am_a_restricted_tag', 'sdk')
data = self.data.copy()
data.update({'tags': 'i_am_a_restricted_tag'})
form = forms.AdditionalDetailsForm(
data=data, request=self.request, instance=self.addon)
assert form.errors['tags'][0] == (
'"i_am_a_restricted_tag" is a reserved tag and cannot be used.')
data.update({'tags': 'i_am_a_restricted_tag, sdk'})
form = forms.AdditionalDetailsForm(
data=data, request=self.request, instance=self.addon)
assert form.errors['tags'][0] == (
'"i_am_a_restricted_tag", "sdk" are reserved tags and'
' cannot be used.')
@mock.patch('olympia.access.acl.action_allowed')
def test_tags_admin_restricted(self, action_allowed):
action_allowed.return_value = True
self.add_restricted('i_am_a_restricted_tag')
self.add_tags('foo, bar')
assert self.get_tag_text() == ['bar', 'foo']
self.add_tags('foo, bar, i_am_a_restricted_tag')
assert self.get_tag_text() == ['bar', 'foo', 'i_am_a_restricted_tag']
form = forms.AdditionalDetailsForm(
data=self.data, request=self.request, instance=self.addon)
assert form.fields['tags'].initial == 'bar, foo, i_am_a_restricted_tag'
@mock.patch('olympia.access.acl.action_allowed')
def test_tags_admin_restricted_count(self, action_allowed):
action_allowed.return_value = True
self.add_restricted()
self.add_tags('i_am_a_restricted_tag, %s' % (', '.join('tag-test-%s' %
i for i in range(0, 20))))
def test_tags_restricted_count(self):
self.add_restricted()
self.add_tags(', '.join('tag-test-%s' % i for i in range(0, 20)))
def test_tags_slugified_count(self):
self.add_tags(', '.join('tag-test' for i in range(0, 21)))
assert self.get_tag_text() == ['tag-test']
def test_tags_limit(self):
self.add_tags(' %s' % ('t' * 128))
def test_tags_long(self):
tag = ' -%s' % ('t' * 128)
data = self.data.copy()
data.update({"tags": tag})
form = forms.AdditionalDetailsForm(
data=data, request=self.request, instance=self.addon)
assert not form.is_valid()
assert form.errors['tags'] == [
'All tags must be 128 characters or less after invalid characters'
' are removed.']
def test_bogus_homepage(self):
form = forms.AdditionalDetailsForm(
{'homepage': 'javascript://something.com'}, request=self.request,
instance=self.addon)
assert not form.is_valid()
assert form.errors['homepage'] == [u'Enter a valid URL.']
def test_ftp_homepage(self):
form = forms.AdditionalDetailsForm(
{'homepage': 'ftp://foo.com'}, request=self.request,
instance=self.addon)
assert not form.is_valid()
assert form.errors['homepage'] == [u'Enter a valid URL.']
def test_homepage_is_not_required(self):
form = forms.AdditionalDetailsForm(
{'default_locale': 'en-US'},
request=self.request, instance=self.addon)
assert form.is_valid()
class TestIconForm(TestCase):
fixtures = ['base/addon_3615']
def setUp(self):
super(TestIconForm, self).setUp()
self.temp_dir = tempfile.mkdtemp(dir=settings.TMP_PATH)
self.addon = Addon.objects.get(pk=3615)
class DummyRequest:
FILES = None
self.request = DummyRequest()
self.icon_path = os.path.join(settings.TMP_PATH, 'icon')
if not os.path.exists(self.icon_path):
os.makedirs(self.icon_path)
def tearDown(self):
rm_local_tmp_dir(self.temp_dir)
super(TestIconForm, self).tearDown()
def get_icon_paths(self):
path = os.path.join(self.addon.get_icon_dir(), str(self.addon.id))
return ['%s-%s.png' % (path, size) for size in amo.ADDON_ICON_SIZES]
@mock.patch('olympia.amo.models.ModelBase.update')
def test_icon_modified(self, update_mock):
name = 'transparent.png'
form = forms.AddonFormMedia({'icon_upload_hash': name},
request=self.request,
instance=self.addon)
dest = os.path.join(self.icon_path, name)
with storage.open(dest, 'wb') as f:
shutil.copyfileobj(open(get_image_path(name), 'rb'), f)
assert form.is_valid()
form.save(addon=self.addon)
assert update_mock.called
class TestCategoryForm(TestCase):
def test_no_possible_categories(self):
Category.objects.create(type=amo.ADDON_SEARCH,
application=amo.FIREFOX.id)
addon = addon_factory(type=amo.ADDON_SEARCH)
request = req_factory_factory('/')
form = forms.CategoryFormSet(addon=addon, request=request)
apps = [f.app for f in form.forms]
assert apps == [amo.FIREFOX]
| bsd-3-clause | 7,490,874,804,458,575,000 | 41.531477 | 79 | 0.591016 | false |
Intel-tensorflow/tensorflow | tensorflow/python/tools/saved_model_utils_test.py | 6 | 5038 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SavedModel utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import saved_model_utils
def tearDownModule():
file_io.delete_recursively(test.get_temp_dir())
class SavedModelUtilTest(test.TestCase):
def _init_and_validate_variable(self, sess, variable_name, variable_value):
v = variables.Variable(variable_value, name=variable_name)
sess.run(variables.global_variables_initializer())
self.assertEqual(variable_value, v.eval())
@test_util.deprecated_graph_mode_only
def testReadSavedModelValid(self):
saved_model_dir = os.path.join(test.get_temp_dir(), "valid_saved_model")
builder = saved_model_builder.SavedModelBuilder(saved_model_dir)
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 42)
builder.add_meta_graph_and_variables(sess, [tag_constants.TRAINING])
builder.save()
actual_saved_model_pb = saved_model_utils.read_saved_model(saved_model_dir)
self.assertEqual(len(actual_saved_model_pb.meta_graphs), 1)
self.assertEqual(
len(actual_saved_model_pb.meta_graphs[0].meta_info_def.tags), 1)
self.assertEqual(actual_saved_model_pb.meta_graphs[0].meta_info_def.tags[0],
tag_constants.TRAINING)
def testReadSavedModelInvalid(self):
saved_model_dir = os.path.join(test.get_temp_dir(), "invalid_saved_model")
with self.assertRaisesRegex(
IOError, "SavedModel file does not exist at: %s" % saved_model_dir):
saved_model_utils.read_saved_model(saved_model_dir)
def testGetSavedModelTagSets(self):
saved_model_dir = os.path.join(test.get_temp_dir(), "test_tags")
builder = saved_model_builder.SavedModelBuilder(saved_model_dir)
# Force test to run in graph mode since SavedModelBuilder.save requires a
# session to work.
with ops.Graph().as_default():
# Graph with a single variable. SavedModel invoked to:
      # - add the graph with weights.
# - a single tag (from predefined constants).
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 42)
builder.add_meta_graph_and_variables(sess, [tag_constants.TRAINING])
# Graph that updates the single variable. SavedModel invoked to:
# - simply add the model (weights are not updated).
# - a single tag (from predefined constants).
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 43)
builder.add_meta_graph([tag_constants.SERVING])
# Graph that updates the single variable. SavedModel is invoked:
# - to add the model (weights are not updated).
# - multiple predefined tags.
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 44)
builder.add_meta_graph([tag_constants.SERVING, tag_constants.GPU])
# Graph that updates the single variable. SavedModel is invoked:
# - to add the model (weights are not updated).
# - multiple predefined tags for serving on TPU.
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 44)
builder.add_meta_graph([tag_constants.SERVING, tag_constants.TPU])
# Graph that updates the single variable. SavedModel is invoked:
# - to add the model (weights are not updated).
# - multiple custom tags.
with self.session(graph=ops.Graph()) as sess:
self._init_and_validate_variable(sess, "v", 45)
builder.add_meta_graph(["foo", "bar"])
# Save the SavedModel to disk.
builder.save()
actual_tags = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)
expected_tags = [["train"], ["serve"], ["serve", "gpu"], ["serve", "tpu"],
["foo", "bar"]]
self.assertEqual(expected_tags, actual_tags)
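    # The literal strings above mirror the tag_constants values:
    # TRAINING == "train", SERVING == "serve", GPU == "gpu", TPU == "tpu".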
if __name__ == "__main__":
test.main()
| apache-2.0 | 7,437,827,679,493,981,000 | 42.059829 | 80 | 0.689956 | false |
infobloxopen/neutron | neutron/tests/unit/api/test_extensions.py | 11 | 33104 | # Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import mock
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
import routes
import webob
import webob.exc as webexc
import webtest
import neutron
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.common import config
from neutron.common import exceptions
from neutron.db import db_base_plugin_v2
from neutron import manager
from neutron.plugins.common import constants
from neutron.plugins.ml2 import plugin as ml2_plugin
from neutron import quota
from neutron.tests import base
from neutron.tests.unit.api.v2 import test_base
from neutron.tests.unit import extension_stubs as ext_stubs
import neutron.tests.unit.extensions
from neutron.tests.unit.extensions import extendedattribute as extattr
from neutron.tests.unit import testlib_api
from neutron import wsgi
LOG = logging.getLogger(__name__)
_uuid = test_base._uuid
_get_path = test_base._get_path
extensions_path = ':'.join(neutron.tests.unit.extensions.__path__)
class ExtensionsTestApp(wsgi.Router):
    def __init__(self, options=None):
mapper = routes.Mapper()
controller = ext_stubs.StubBaseAppController()
mapper.resource("dummy_resource", "/dummy_resources",
controller=controller)
super(ExtensionsTestApp, self).__init__(mapper)
class FakePluginWithExtension(db_base_plugin_v2.NeutronDbPluginV2):
"""A fake plugin used only for extension testing in this file."""
supported_extension_aliases = ["FOXNSOX"]
def method_to_support_foxnsox_extension(self, context):
self._log("method_to_support_foxnsox_extension", context)
class PluginInterfaceTest(base.BaseTestCase):
def test_issubclass_hook(self):
class A(object):
def f(self):
pass
class B(extensions.PluginInterface):
@abc.abstractmethod
def f(self):
pass
self.assertTrue(issubclass(A, B))
def test_issubclass_hook_class_without_abstract_methods(self):
class A(object):
def f(self):
pass
class B(extensions.PluginInterface):
def f(self):
pass
self.assertFalse(issubclass(A, B))
def test_issubclass_hook_not_all_methods_implemented(self):
class A(object):
def f(self):
pass
class B(extensions.PluginInterface):
@abc.abstractmethod
def f(self):
pass
@abc.abstractmethod
def g(self):
pass
self.assertFalse(issubclass(A, B))
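    # The assertions above suggest PluginInterface defines a
    # __subclasshook__ roughly like this illustrative sketch (simplified,
    # not the actual neutron source):
    #
    #     @classmethod
    #     def __subclasshook__(cls, klass):
    #         if not cls.__abstractmethods__:
    #             return NotImplemented  # fall back to normal issubclass
    #         if all(any(m in base.__dict__ for base in klass.__mro__)
    #                for m in cls.__abstractmethods__):
    #             return True
    #         return NotImplemented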
class ResourceExtensionTest(base.BaseTestCase):
class ResourceExtensionController(wsgi.Controller):
def index(self, request):
return "resource index"
def show(self, request, id):
return {'data': {'id': id}}
def notimplemented_function(self, request, id):
return webob.exc.HTTPNotImplemented()
def custom_member_action(self, request, id):
return {'member_action': 'value'}
def custom_collection_action(self, request, **kwargs):
return {'collection': 'value'}
class DummySvcPlugin(wsgi.Controller):
def get_plugin_type(self):
return constants.DUMMY
def index(self, request, **kwargs):
return "resource index"
def custom_member_action(self, request, **kwargs):
return {'member_action': 'value'}
def collection_action(self, request, **kwargs):
return {'collection': 'value'}
def show(self, request, id):
return {'data': {'id': id}}
def test_exceptions_notimplemented(self):
controller = self.ResourceExtensionController()
member = {'notimplemented_function': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller,
member_actions=member)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
        # Ideally we would check for a 501 code here, but webtest doesn't
        # accept status codes below 200 or above 400, so we can't check the
        # code directly. It raises webtest.AppError instead.
try:
test_app.get("/tweedles/some_id/notimplemented_function")
            # Shouldn't be reached
            self.fail("Expected the 501 response to raise webtest.AppError")
except webtest.AppError as e:
self.assertIn('501', e.message)
def test_resource_can_be_added_as_extension(self):
res_ext = extensions.ResourceExtension(
'tweedles', self.ResourceExtensionController())
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
index_response = test_app.get("/tweedles")
self.assertEqual(200, index_response.status_int)
self.assertEqual("resource index", index_response.body)
show_response = test_app.get("/tweedles/25266")
self.assertEqual({'data': {'id': "25266"}}, show_response.json)
def test_resource_gets_prefix_of_plugin(self):
class DummySvcPlugin(wsgi.Controller):
def index(self, request):
return ""
def get_plugin_type(self):
return constants.DUMMY
res_ext = extensions.ResourceExtension(
'tweedles', DummySvcPlugin(), path_prefix="/dummy_svc")
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
index_response = test_app.get("/dummy_svc/tweedles")
self.assertEqual(200, index_response.status_int)
def test_resource_extension_with_custom_member_action(self):
controller = self.ResourceExtensionController()
member = {'custom_member_action': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller,
member_actions=member)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/tweedles/some_id/custom_member_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['member_action'],
"value")
def test_resource_ext_with_custom_member_action_gets_plugin_prefix(self):
controller = self.DummySvcPlugin()
member = {'custom_member_action': "GET"}
collections = {'collection_action': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller,
path_prefix="/dummy_svc",
member_actions=member,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/dummy_svc/tweedles/1/custom_member_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['member_action'],
"value")
response = test_app.get("/dummy_svc/tweedles/collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'],
"value")
def test_plugin_prefix_with_parent_resource(self):
controller = self.DummySvcPlugin()
parent = dict(member_name="tenant",
collection_name="tenants")
member = {'custom_member_action': "GET"}
collections = {'collection_action': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller, parent,
path_prefix="/dummy_svc",
member_actions=member,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
index_response = test_app.get("/dummy_svc/tenants/1/tweedles")
self.assertEqual(200, index_response.status_int)
response = test_app.get("/dummy_svc/tenants/1/"
"tweedles/1/custom_member_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['member_action'],
"value")
response = test_app.get("/dummy_svc/tenants/2/"
"tweedles/collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'],
"value")
def test_resource_extension_for_get_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/tweedles/custom_collection_action")
self.assertEqual(200, response.status_int)
LOG.debug(jsonutils.loads(response.body))
self.assertEqual(jsonutils.loads(response.body)['collection'], "value")
def test_resource_extension_for_put_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "PUT"}
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.put("/tweedles/custom_collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')
def test_resource_extension_for_post_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "POST"}
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.post("/tweedles/custom_collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')
def test_resource_extension_for_delete_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "DELETE"}
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.delete("/tweedles/custom_collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')
def test_resource_ext_for_formatted_req_on_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "GET"}
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/tweedles/custom_collection_action.json")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'], "value")
def test_resource_ext_for_nested_resource_custom_collection_action(self):
controller = self.ResourceExtensionController()
collections = {'custom_collection_action': "GET"}
parent = dict(collection_name='beetles', member_name='beetle')
res_ext = extensions.ResourceExtension('tweedles', controller,
collection_actions=collections,
parent=parent)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/beetles/beetle_id"
"/tweedles/custom_collection_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['collection'], "value")
def test_resource_extension_with_custom_member_action_and_attr_map(self):
controller = self.ResourceExtensionController()
member = {'custom_member_action': "GET"}
params = {
'tweedles': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '', 'is_visible': True},
}
}
res_ext = extensions.ResourceExtension('tweedles', controller,
member_actions=member,
attr_map=params)
test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
response = test_app.get("/tweedles/some_id/custom_member_action")
self.assertEqual(200, response.status_int)
self.assertEqual(jsonutils.loads(response.body)['member_action'],
"value")
def test_returns_404_for_non_existent_extension(self):
test_app = _setup_extensions_test_app(SimpleExtensionManager(None))
response = test_app.get("/non_extistant_extension", status='*')
self.assertEqual(404, response.status_int)
class ActionExtensionTest(base.BaseTestCase):
def setUp(self):
super(ActionExtensionTest, self).setUp()
self.extension_app = _setup_extensions_test_app()
def test_extended_action_for_adding_extra_data(self):
action_name = 'FOXNSOX:add_tweedle'
action_params = dict(name='Beetle')
req_body = jsonutils.dumps({action_name: action_params})
response = self.extension_app.post('/dummy_resources/1/action',
req_body,
content_type='application/json')
self.assertEqual("Tweedle Beetle Added.", response.body)
def test_extended_action_for_deleting_extra_data(self):
action_name = 'FOXNSOX:delete_tweedle'
action_params = dict(name='Bailey')
req_body = jsonutils.dumps({action_name: action_params})
response = self.extension_app.post("/dummy_resources/1/action",
req_body,
content_type='application/json')
self.assertEqual("Tweedle Bailey Deleted.", response.body)
def test_returns_404_for_non_existent_action(self):
non_existent_action = 'blah_action'
action_params = dict(name="test")
req_body = jsonutils.dumps({non_existent_action: action_params})
response = self.extension_app.post("/dummy_resources/1/action",
req_body,
content_type='application/json',
status='*')
self.assertEqual(404, response.status_int)
def test_returns_404_for_non_existent_resource(self):
action_name = 'add_tweedle'
action_params = dict(name='Beetle')
req_body = jsonutils.dumps({action_name: action_params})
response = self.extension_app.post("/asdf/1/action", req_body,
content_type='application/json',
status='*')
self.assertEqual(404, response.status_int)
class RequestExtensionTest(base.BaseTestCase):
def test_headers_can_be_extended(self):
def extend_headers(req, res):
assert req.headers['X-NEW-REQUEST-HEADER'] == "sox"
res.headers['X-NEW-RESPONSE-HEADER'] = "response_header_data"
return res
app = self._setup_app_with_request_handler(extend_headers, 'GET')
response = app.get("/dummy_resources/1",
headers={'X-NEW-REQUEST-HEADER': "sox"})
self.assertEqual(response.headers['X-NEW-RESPONSE-HEADER'],
"response_header_data")
def test_extend_get_resource_response(self):
def extend_response_data(req, res):
data = jsonutils.loads(res.body)
data['FOXNSOX:extended_key'] = req.GET.get('extended_key')
res.body = jsonutils.dumps(data)
return res
app = self._setup_app_with_request_handler(extend_response_data, 'GET')
response = app.get("/dummy_resources/1?extended_key=extended_data")
self.assertEqual(200, response.status_int)
response_data = jsonutils.loads(response.body)
self.assertEqual('extended_data',
response_data['FOXNSOX:extended_key'])
self.assertEqual('knox', response_data['fort'])
def test_get_resources(self):
app = _setup_extensions_test_app()
response = app.get("/dummy_resources/1?chewing=newblue")
response_data = jsonutils.loads(response.body)
self.assertEqual('newblue', response_data['FOXNSOX:googoose'])
self.assertEqual("Pig Bands!", response_data['FOXNSOX:big_bands'])
def test_edit_previously_uneditable_field(self):
def _update_handler(req, res):
data = jsonutils.loads(res.body)
data['uneditable'] = req.params['uneditable']
res.body = jsonutils.dumps(data)
return res
base_app = webtest.TestApp(setup_base_app(self))
response = base_app.put("/dummy_resources/1",
{'uneditable': "new_value"})
self.assertEqual(response.json['uneditable'], "original_value")
ext_app = self._setup_app_with_request_handler(_update_handler,
'PUT')
ext_response = ext_app.put("/dummy_resources/1",
{'uneditable': "new_value"})
self.assertEqual(ext_response.json['uneditable'], "new_value")
def _setup_app_with_request_handler(self, handler, verb):
req_ext = extensions.RequestExtension(verb,
'/dummy_resources/:(id)',
handler)
manager = SimpleExtensionManager(None, None, req_ext)
return _setup_extensions_test_app(manager)
class ExtensionManagerTest(base.BaseTestCase):
def test_invalid_extensions_are_not_registered(self):
class InvalidExtension(object):
"""Invalid extension.
            This extension doesn't implement the required extension methods:
            get_name, get_description, get_namespace and get_updated.
"""
def get_alias(self):
return "invalid_extension"
ext_mgr = extensions.ExtensionManager('')
ext_mgr.add_extension(InvalidExtension())
ext_mgr.add_extension(ext_stubs.StubExtension("valid_extension"))
self.assertIn('valid_extension', ext_mgr.extensions)
self.assertNotIn('invalid_extension', ext_mgr.extensions)
class PluginAwareExtensionManagerTest(base.BaseTestCase):
def test_unsupported_extensions_are_not_loaded(self):
stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1", "e3"])
plugin_info = {constants.CORE: stub_plugin}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
ext_mgr.add_extension(ext_stubs.StubExtension("e1"))
ext_mgr.add_extension(ext_stubs.StubExtension("e2"))
ext_mgr.add_extension(ext_stubs.StubExtension("e3"))
self.assertIn("e1", ext_mgr.extensions)
self.assertNotIn("e2", ext_mgr.extensions)
self.assertIn("e3", ext_mgr.extensions)
def test_extensions_are_not_loaded_for_plugins_unaware_of_extensions(self):
class ExtensionUnawarePlugin(object):
"""This plugin does not implement supports_extension method.
Extensions will not be loaded when this plugin is used.
"""
pass
plugin_info = {constants.CORE: ExtensionUnawarePlugin()}
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
ext_mgr.add_extension(ext_stubs.StubExtension("e1"))
self.assertNotIn("e1", ext_mgr.extensions)
def test_extensions_not_loaded_for_plugin_without_expected_interface(self):
class PluginWithoutExpectedIface(object):
"""Does not implement get_foo method as expected by extension."""
supported_extension_aliases = ["supported_extension"]
plugin_info = {constants.CORE: PluginWithoutExpectedIface()}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
ext_mgr.add_extension(ext_stubs.ExtensionExpectingPluginInterface(
"supported_extension"))
self.assertNotIn("e1", ext_mgr.extensions)
def test_extensions_are_loaded_for_plugin_with_expected_interface(self):
class PluginWithExpectedInterface(object):
"""Implements get_foo method as expected by extension."""
supported_extension_aliases = ["supported_extension"]
def get_foo(self, bar=None):
pass
plugin_info = {constants.CORE: PluginWithExpectedInterface()}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
ext_mgr.add_extension(ext_stubs.ExtensionExpectingPluginInterface(
"supported_extension"))
self.assertIn("supported_extension", ext_mgr.extensions)
def test_extensions_expecting_neutron_plugin_interface_are_loaded(self):
        class ExtensionForQuantumPluginInterface(ext_stubs.StubExtension):
            """This extension does not implement get_plugin_interface.
            It will work with any plugin implementing NeutronPluginBase.
            """
pass
stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"])
plugin_info = {constants.CORE: stub_plugin}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
            ext_mgr.add_extension(ExtensionForQuantumPluginInterface("e1"))
self.assertIn("e1", ext_mgr.extensions)
def test_extensions_without_need_for__plugin_interface_are_loaded(self):
class ExtensionWithNoNeedForPluginInterface(ext_stubs.StubExtension):
"""This Extension does not need any plugin interface.
This will work with any plugin implementing NeutronPluginBase
"""
def get_plugin_interface(self):
return None
stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"])
plugin_info = {constants.CORE: stub_plugin}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
ext_mgr.add_extension(ExtensionWithNoNeedForPluginInterface("e1"))
self.assertIn("e1", ext_mgr.extensions)
def test_extension_loaded_for_non_core_plugin(self):
        class NonCorePluginExtension(ext_stubs.StubExtension):
def get_plugin_interface(self):
return None
stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"])
plugin_info = {constants.DUMMY: stub_plugin}
with mock.patch("neutron.api.extensions.PluginAwareExtensionManager."
"check_if_plugin_extensions_loaded"):
ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info)
            ext_mgr.add_extension(NonCorePluginExtension("e1"))
self.assertIn("e1", ext_mgr.extensions)
def test_unloaded_supported_extensions_raises_exception(self):
stub_plugin = ext_stubs.StubPlugin(
supported_extensions=["unloaded_extension"])
plugin_info = {constants.CORE: stub_plugin}
self.assertRaises(exceptions.ExtensionsNotFound,
extensions.PluginAwareExtensionManager,
'', plugin_info)
class ExtensionControllerTest(testlib_api.WebTestCase):
def setUp(self):
super(ExtensionControllerTest, self).setUp()
self.test_app = _setup_extensions_test_app()
    def test_index_gets_all_registered_extensions(self):
response = self.test_app.get("/extensions." + self.fmt)
res_body = self.deserialize(response)
foxnsox = res_body["extensions"][0]
self.assertEqual(foxnsox["alias"], "FOXNSOX")
self.assertEqual(foxnsox["namespace"],
"http://www.fox.in.socks/api/ext/pie/v1.0")
def test_extension_can_be_accessed_by_alias(self):
response = self.test_app.get("/extensions/FOXNSOX." + self.fmt)
foxnsox_extension = self.deserialize(response)
foxnsox_extension = foxnsox_extension['extension']
self.assertEqual(foxnsox_extension["alias"], "FOXNSOX")
self.assertEqual(foxnsox_extension["namespace"],
"http://www.fox.in.socks/api/ext/pie/v1.0")
def test_show_returns_not_found_for_non_existent_extension(self):
response = self.test_app.get("/extensions/non_existent" + self.fmt,
status="*")
self.assertEqual(response.status_int, 404)
def app_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
return ExtensionsTestApp(conf)
def setup_base_app(test):
base.BaseTestCase.config_parse()
app = config.load_paste_app('extensions_test_app')
return app
def setup_extensions_middleware(extension_manager=None):
extension_manager = (extension_manager or
extensions.PluginAwareExtensionManager(
extensions_path,
{constants.CORE: FakePluginWithExtension()}))
base.BaseTestCase.config_parse()
app = config.load_paste_app('extensions_test_app')
return extensions.ExtensionMiddleware(app, ext_mgr=extension_manager)
def _setup_extensions_test_app(extension_manager=None):
return webtest.TestApp(setup_extensions_middleware(extension_manager))
class SimpleExtensionManager(object):
def __init__(self, resource_ext=None, action_ext=None, request_ext=None):
self.resource_ext = resource_ext
self.action_ext = action_ext
self.request_ext = request_ext
def get_resources(self):
resource_exts = []
if self.resource_ext:
resource_exts.append(self.resource_ext)
return resource_exts
def get_actions(self):
action_exts = []
if self.action_ext:
action_exts.append(self.action_ext)
return action_exts
def get_request_extensions(self):
request_extensions = []
if self.request_ext:
request_extensions.append(self.request_ext)
return request_extensions
class ExtensionExtendedAttributeTestPlugin(
ml2_plugin.Ml2Plugin):
supported_extension_aliases = [
'ext-obj-test', "extended-ext-attr"
]
def __init__(self, configfile=None):
        # Ml2Plugin.__init__ is not invoked here; this test plugin only
        # needs its own in-memory object store.
self.objs = []
self.objh = {}
def create_ext_test_resource(self, context, ext_test_resource):
obj = ext_test_resource['ext_test_resource']
id = _uuid()
obj['id'] = id
self.objs.append(obj)
self.objh.update({id: obj})
return obj
def get_ext_test_resources(self, context, filters=None, fields=None):
return self.objs
def get_ext_test_resource(self, context, id, fields=None):
return self.objh[id]
class ExtensionExtendedAttributeTestCase(base.BaseTestCase):
def setUp(self):
super(ExtensionExtendedAttributeTestCase, self).setUp()
plugin = (
"neutron.tests.unit.api.test_extensions."
"ExtensionExtendedAttributeTestPlugin"
)
# point config file to: neutron/tests/etc/neutron.conf.test
self.config_parse()
self.setup_coreplugin(plugin)
ext_mgr = extensions.PluginAwareExtensionManager(
extensions_path,
{constants.CORE: ExtensionExtendedAttributeTestPlugin}
)
ext_mgr.extend_resources("2.0", {})
extensions.PluginAwareExtensionManager._instance = ext_mgr
app = config.load_paste_app('extensions_test_app')
self._api = extensions.ExtensionMiddleware(app, ext_mgr=ext_mgr)
self._tenant_id = "8c70909f-b081-452d-872b-df48e6c355d1"
# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
self.saved_attr_map[resource] = attrs.copy()
# Add the resources to the global attribute map
# This is done here as the setup process won't
# initialize the main API router which extends
# the global attribute map
attributes.RESOURCE_ATTRIBUTE_MAP.update(
extattr.EXTENDED_ATTRIBUTES_2_0)
        self.agentscheduler_dbMixin = manager.NeutronManager.get_plugin()
self.addCleanup(self.restore_attribute_map)
quota.QUOTAS._driver = None
cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
group='QUOTAS')
def restore_attribute_map(self):
# Restore the original RESOURCE_ATTRIBUTE_MAP
attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
def _do_request(self, method, path, data=None, params=None, action=None):
content_type = 'application/json'
body = None
if data is not None: # empty dict is valid
body = wsgi.Serializer().serialize(data, content_type)
req = testlib_api.create_request(
path, body, content_type,
method, query_string=params)
res = req.get_response(self._api)
if res.status_code >= 400:
raise webexc.HTTPClientError(detail=res.body, code=res.status_code)
if res.status_code != webexc.HTTPNoContent.code:
return res.json
def _ext_test_resource_create(self, attr=None):
data = {
"ext_test_resource": {
"tenant_id": self._tenant_id,
"name": "test",
extattr.EXTENDED_ATTRIBUTE: attr
}
}
res = self._do_request('POST', _get_path('ext_test_resources'), data)
return res['ext_test_resource']
def test_ext_test_resource_create(self):
ext_test_resource = self._ext_test_resource_create()
attr = _uuid()
ext_test_resource = self._ext_test_resource_create(attr)
self.assertEqual(ext_test_resource[extattr.EXTENDED_ATTRIBUTE], attr)
def test_ext_test_resource_get(self):
attr = _uuid()
obj = self._ext_test_resource_create(attr)
obj_id = obj['id']
res = self._do_request('GET', _get_path(
'ext_test_resources/{0}'.format(obj_id)))
obj2 = res['ext_test_resource']
self.assertEqual(obj2[extattr.EXTENDED_ATTRIBUTE], attr)
| apache-2.0 | 3,567,949,105,156,518,400 | 40.122981 | 79 | 0.620892 | false |
ravibhure/ansible | lib/ansible/utils/module_docs_fragments/dellos9.py | 75 | 2591 | #
# (c) 2015, Peter Sprygada <[email protected]>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
provider:
description:
- A dict object containing connection details.
default: null
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device.
default: 22
username:
description:
- User to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
C(ANSIBLE_NET_USERNAME) will be used instead.
password:
description:
- Password to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
C(ANSIBLE_NET_PASSWORD) will be used instead.
default: null
ssh_keyfile:
description:
- Path to an ssh key used to authenticate the SSH session to the remote
device. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead.
timeout:
description:
- Specifies idle timeout (in seconds) for the connection. Useful if the
console freezes before continuing. For example when saving
configurations.
default: 10
notes:
- For more information on using Ansible to manage Dell EMC Network devices see U(https://www.ansible.com/ansible-dell-networking).
"""
| gpl-3.0 | -7,546,888,424,774,245,000 | 37.671642 | 132 | 0.678503 | false |
adedayo/intellij-community | python/lib/Lib/site-packages/django/contrib/messages/storage/cookie.py | 89 | 5873 | from django.conf import settings
from django.contrib.messages import constants
from django.contrib.messages.storage.base import BaseStorage, Message
from django.http import CompatCookie
from django.utils import simplejson as json
from django.utils.crypto import salted_hmac, constant_time_compare
class MessageEncoder(json.JSONEncoder):
"""
Compactly serializes instances of the ``Message`` class as JSON.
"""
message_key = '__json_message'
def default(self, obj):
if isinstance(obj, Message):
message = [self.message_key, obj.level, obj.message]
if obj.extra_tags:
message.append(obj.extra_tags)
return message
return super(MessageEncoder, self).default(obj)
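    # Round-trip illustration (values chosen for the example): a
    # Message(25, 'Saved!', extra_tags='note') is serialized as the JSON
    # array ["__json_message", 25, "Saved!", "note"], and MessageDecoder
    # below rebuilds a Message instance from any such array.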
class MessageDecoder(json.JSONDecoder):
"""
Decodes JSON that includes serialized ``Message`` instances.
"""
def process_messages(self, obj):
if isinstance(obj, list) and obj:
if obj[0] == MessageEncoder.message_key:
return Message(*obj[1:])
return [self.process_messages(item) for item in obj]
if isinstance(obj, dict):
return dict([(key, self.process_messages(value))
for key, value in obj.iteritems()])
return obj
def decode(self, s, **kwargs):
decoded = super(MessageDecoder, self).decode(s, **kwargs)
return self.process_messages(decoded)
class CookieStorage(BaseStorage):
"""
Stores messages in a cookie.
"""
cookie_name = 'messages'
# We should be able to store 4K in a cookie, but Internet Explorer
# imposes 4K as the *total* limit for a domain. To allow other
# cookies, we go for 3/4 of 4K.
max_cookie_size = 3072
not_finished = '__messagesnotfinished__'
def _get(self, *args, **kwargs):
"""
Retrieves a list of messages from the messages cookie. If the
not_finished sentinel value is found at the end of the message list,
        removes it and returns a result indicating that not all messages
        were retrieved by this storage.
"""
data = self.request.COOKIES.get(self.cookie_name)
messages = self._decode(data)
all_retrieved = not (messages and messages[-1] == self.not_finished)
if messages and not all_retrieved:
# remove the sentinel value
messages.pop()
return messages, all_retrieved
def _update_cookie(self, encoded_data, response):
"""
Either sets the cookie with the encoded data if there is any data to
store, or deletes the cookie.
"""
if encoded_data:
response.set_cookie(self.cookie_name, encoded_data)
else:
response.delete_cookie(self.cookie_name)
def _store(self, messages, response, remove_oldest=True, *args, **kwargs):
"""
Stores the messages to a cookie, returning a list of any messages which
could not be stored.
        If the encoded data is larger than ``max_cookie_size``, removes
        messages until the data fits (these are the messages which are
        returned), and adds the not_finished sentinel value to indicate as much.
"""
unstored_messages = []
encoded_data = self._encode(messages)
if self.max_cookie_size:
# data is going to be stored eventually by CompatCookie, which
            # adds its own overhead, which we must account for.
cookie = CompatCookie() # create outside the loop
def stored_length(val):
return len(cookie.value_encode(val)[1])
while encoded_data and stored_length(encoded_data) > self.max_cookie_size:
if remove_oldest:
unstored_messages.append(messages.pop(0))
else:
unstored_messages.insert(0, messages.pop())
encoded_data = self._encode(messages + [self.not_finished],
encode_empty=unstored_messages)
self._update_cookie(encoded_data, response)
return unstored_messages
def _hash(self, value):
"""
Creates an HMAC/SHA1 hash based on the value and the project setting's
SECRET_KEY, modified to make it unique for the present purpose.
"""
key_salt = 'django.contrib.messages'
return salted_hmac(key_salt, value).hexdigest()
def _encode(self, messages, encode_empty=False):
"""
Returns an encoded version of the messages list which can be stored as
plain text.
Since the data will be retrieved from the client-side, the encoded data
also contains a hash to ensure that the data was not tampered with.
"""
if messages or encode_empty:
encoder = MessageEncoder(separators=(',', ':'))
value = encoder.encode(messages)
return '%s$%s' % (self._hash(value), value)
def _decode(self, data):
"""
        Safely decodes an encoded text stream back into a list of messages.
If the encoded text stream contained an invalid hash or was in an
invalid format, ``None`` is returned.
"""
if not data:
return None
bits = data.split('$', 1)
if len(bits) == 2:
hash, value = bits
if constant_time_compare(hash, self._hash(value)):
try:
# If we get here (and the JSON decode works), everything is
# good. In any other case, drop back and return None.
return json.loads(value, cls=MessageDecoder)
except ValueError:
pass
# Mark the data as used (so it gets removed) since something was wrong
# with the data.
self.used = True
return None
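if __name__ == '__main__':
    # Minimal round-trip sketch, not part of the original module; it assumes
    # Django is importable. A Message serializes to a compact JSON list tagged
    # with MessageEncoder.message_key and decodes back to a Message instance.
    demo = [Message(constants.INFO, 'Hello world', extra_tags='demo')]
    encoded = MessageEncoder(separators=(',', ':')).encode(demo)
    decoded = json.loads(encoded, cls=MessageDecoder)
    assert decoded[0].message == 'Hello world'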
| apache-2.0 | 5,716,911,495,833,867,000 | 38.153333 | 86 | 0.606164 | false |
rahul67/hue | desktop/core/ext-py/requests-2.6.0/requests/packages/urllib3/connection.py | 371 | 8967 | import datetime
import sys
import socket
from socket import timeout as SocketTimeout
import warnings
from .packages import six
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection, HTTPException
except ImportError:
from httplib import HTTPConnection as _HTTPConnection, HTTPException
class DummyConnection(object):
"Used to detect a failed ConnectionCls import."
pass
try: # Compiled with SSL?
HTTPSConnection = DummyConnection
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
from .exceptions import (
ConnectTimeoutError,
SystemTimeWarning,
SecurityWarning,
)
from .packages.ssl_match_hostname import match_hostname
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
from .util import connection
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
return conn
def _prepare_conn(self, conn):
self.sock = conn
        # The _tunnel_host attribute was added in Python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f), so Pythons 2.6.0-2.6.2
        # do not have it.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None):
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.ca_certs = ca_certs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def connect(self):
# Add certificate verification
conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
server_hostname=hostname,
ssl_version=resolved_ssl_version)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
if not cert.get('subjectAltName', ()):
warnings.warn((
'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. '
'This feature is being removed by major browsers and deprecated by RFC 2818. '
'(See https://github.com/shazow/urllib3/issues/497 for details.)'),
SecurityWarning
)
match_hostname(cert, self.assert_hostname or hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None)
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
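if __name__ == '__main__':
    # Hedged usage sketch, not part of upstream urllib3: extend the default
    # socket options with TCP keepalive, as suggested in the HTTPConnection
    # docstring. No request is made here; _new_conn() applies the options
    # lazily when the connection is first used.
    demo_options = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
    ]
    demo_conn = HTTPConnection('example.com', port=80,
                               socket_options=demo_options)
    print(demo_conn.socket_options)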
| apache-2.0 | -2,531,924,569,686,760,400 | 33.225191 | 109 | 0.615256 | false |
ansible/ansible | lib/ansible/module_utils/facts/hardware/hpux.py | 159 | 8351 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
class HPUXHardware(Hardware):
"""
HP-UX-specific subclass of Hardware. Defines memory and CPU facts:
- memfree_mb
- memtotal_mb
- swapfree_mb
- swaptotal_mb
- processor
- processor_cores
- processor_count
- model
- firmware
"""
platform = 'HP-UX'
def populate(self, collected_facts=None):
hardware_facts = {}
cpu_facts = self.get_cpu_facts(collected_facts=collected_facts)
memory_facts = self.get_memory_facts()
hw_facts = self.get_hw_facts()
hardware_facts.update(cpu_facts)
hardware_facts.update(memory_facts)
hardware_facts.update(hw_facts)
return hardware_facts
def get_cpu_facts(self, collected_facts=None):
cpu_facts = {}
collected_facts = collected_facts or {}
if collected_facts.get('ansible_architecture') in ['9000/800', '9000/785']:
rc, out, err = self.module.run_command("ioscan -FkCprocessor | wc -l", use_unsafe_shell=True)
cpu_facts['processor_count'] = int(out.strip())
# Working with machinfo mess
elif collected_facts.get('ansible_architecture') == 'ia64':
if collected_facts.get('ansible_distribution_version') == "B.11.23":
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep 'Number of CPUs'", use_unsafe_shell=True)
if out:
cpu_facts['processor_count'] = int(out.strip().split('=')[1])
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep 'processor family'", use_unsafe_shell=True)
if out:
cpu_facts['processor'] = re.search('.*(Intel.*)', out).groups()[0].strip()
rc, out, err = self.module.run_command("ioscan -FkCprocessor | wc -l", use_unsafe_shell=True)
cpu_facts['processor_cores'] = int(out.strip())
if collected_facts.get('ansible_distribution_version') == "B.11.31":
                # machinfo only reports 'core' strings on B.11.31 releases newer than 1204
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep core | wc -l", use_unsafe_shell=True)
if out.strip() == '0':
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Intel", use_unsafe_shell=True)
cpu_facts['processor_count'] = int(out.strip().split(" ")[0])
# If hyperthreading is active divide cores by 2
rc, out, err = self.module.run_command("/usr/sbin/psrset | grep LCPU", use_unsafe_shell=True)
data = re.sub(' +', ' ', out).strip().split(' ')
if len(data) == 1:
hyperthreading = 'OFF'
else:
hyperthreading = data[1]
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep logical", use_unsafe_shell=True)
data = out.strip().split(" ")
if hyperthreading == 'ON':
cpu_facts['processor_cores'] = int(data[0]) / 2
else:
if len(data) == 1:
cpu_facts['processor_cores'] = cpu_facts['processor_count']
else:
cpu_facts['processor_cores'] = int(data[0])
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Intel |cut -d' ' -f4-", use_unsafe_shell=True)
cpu_facts['processor'] = out.strip()
else:
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | egrep 'socket[s]?$' | tail -1", use_unsafe_shell=True)
cpu_facts['processor_count'] = int(out.strip().split(" ")[0])
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep -e '[0-9] core' | tail -1", use_unsafe_shell=True)
cpu_facts['processor_cores'] = int(out.strip().split(" ")[0])
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Intel", use_unsafe_shell=True)
cpu_facts['processor'] = out.strip()
return cpu_facts
def get_memory_facts(self, collected_facts=None):
memory_facts = {}
collected_facts = collected_facts or {}
pagesize = 4096
rc, out, err = self.module.run_command("/usr/bin/vmstat | tail -1", use_unsafe_shell=True)
data = int(re.sub(' +', ' ', out).split(' ')[5].strip())
memory_facts['memfree_mb'] = pagesize * data // 1024 // 1024
if collected_facts.get('ansible_architecture') in ['9000/800', '9000/785']:
try:
rc, out, err = self.module.run_command("grep Physical /var/adm/syslog/syslog.log")
data = re.search('.*Physical: ([0-9]*) Kbytes.*', out).groups()[0].strip()
memory_facts['memtotal_mb'] = int(data) // 1024
except AttributeError:
# For systems where memory details aren't sent to syslog or the log has rotated, use parsed
# adb output. Unfortunately /dev/kmem doesn't have world-read, so this only works as root.
if os.access("/dev/kmem", os.R_OK):
rc, out, err = self.module.run_command("echo 'phys_mem_pages/D' | adb -k /stand/vmunix /dev/kmem | tail -1 | awk '{print $2}'",
use_unsafe_shell=True)
if not err:
data = out
memory_facts['memtotal_mb'] = int(data) / 256
else:
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Memory", use_unsafe_shell=True)
data = re.search(r'Memory[\ :=]*([0-9]*).*MB.*', out).groups()[0].strip()
memory_facts['memtotal_mb'] = int(data)
rc, out, err = self.module.run_command("/usr/sbin/swapinfo -m -d -f -q")
memory_facts['swaptotal_mb'] = int(out.strip())
rc, out, err = self.module.run_command("/usr/sbin/swapinfo -m -d -f | egrep '^dev|^fs'", use_unsafe_shell=True)
swap = 0
for line in out.strip().splitlines():
swap += int(re.sub(' +', ' ', line).split(' ')[3].strip())
memory_facts['swapfree_mb'] = swap
return memory_facts
def get_hw_facts(self, collected_facts=None):
hw_facts = {}
collected_facts = collected_facts or {}
rc, out, err = self.module.run_command("model")
hw_facts['model'] = out.strip()
if collected_facts.get('ansible_architecture') == 'ia64':
separator = ':'
if collected_facts.get('ansible_distribution_version') == "B.11.23":
separator = '='
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo |grep -i 'Firmware revision' | grep -v BMC", use_unsafe_shell=True)
hw_facts['firmware_version'] = out.split(separator)[1].strip()
rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo |grep -i 'Machine serial number' ", use_unsafe_shell=True)
if rc == 0 and out:
hw_facts['product_serial'] = out.split(separator)[1].strip()
return hw_facts
class HPUXHardwareCollector(HardwareCollector):
_fact_class = HPUXHardware
_platform = 'HP-UX'
required_facts = set(['platform', 'distribution'])
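if __name__ == '__main__':
    # Illustrative sketch, not part of the module: the free-memory arithmetic
    # from get_memory_facts() applied to a hypothetical `vmstat` tail line,
    # where field 5 holds the number of free 4 KiB pages.
    sample = '3 0 0 0 0 262144 10 0 0 0 0'
    pagesize = 4096
    free_pages = int(re.sub(' +', ' ', sample).split(' ')[5].strip())
    print(pagesize * free_pages // 1024 // 1024)  # -> 1024 (MB)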
| gpl-3.0 | -1,512,912,057,212,967,700 | 49.612121 | 147 | 0.569632 | false |
carlosdiazsuarez/movierecommender | MovieRecommender/connectors/imdb/imdb_connector.py | 1 | 2413 | '''
IMDB connector class for Python.
This script provides a Python class that queries IMDB (www.imdb.com) search
and title pages, adapted from a Google Movie Showtimes parser.
@author Vaidik Kapoor
@version 0.1
'''
import httplib, urllib
from copy import deepcopy
from BeautifulSoup import BeautifulSoup
'''
IMDB class
This class is used for querying www.imdb.com search and title pages
'''
class IMDB:
def __init__(self):
return
def getIDs(self, name):
self.params = {'q': name.encode("latin_1")}
params = deepcopy(self.params)
for key, val in params.iteritems():
if val == '':
self.params.pop(key)
params = urllib.urlencode(self.params)
conn = httplib.HTTPConnection('www.imdb.com')
conn.request("GET", "/find?" + params, "")
response = conn.getresponse()
self.response_code = response.status
        self.response = response.getheaders()
self.response_body = ""
while 1:
data = response.read()
if not data:
break
self.response_body += data
if (self.response_code == 200):
self.html = BeautifulSoup(self.response_body)
results = self.html.findAll('td', attrs={'class': 'result_text'})
self.ids = []
for td in results:
self.ids.append(td.a.attrs[0][1])
return self.ids
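    # Hedged usage sketch, not part of the original class: the two lookups are
    # typically chained, e.g.
    #   imdb = IMDB()
    #   ids = imdb.getIDs(u'The Matrix')      # relative /title/... paths
    #   titles = imdb.getTitleExtra(ids[0])   # original-title strings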
def getTitleExtra(self, title):
conn = httplib.HTTPConnection('www.imdb.com')
conn.request("GET", title, "")
response = conn.getresponse()
self.response_code = response.status
        self.response = response.getheaders()
self.response_body = ""
while 1:
data = response.read()
if not data:
break
self.response_body += data
if (self.response_code == 200):
self.html = BeautifulSoup(self.response_body)
results = self.html.findAll('span', attrs={'class': 'title-extra'})
self.titles = []
for span in results:
title = span.contents[0]
title = title.replace('\n', ' ')
title = title.replace('"', ' ')
title = title.strip()
self.titles.append(title)
        return self.titles
| gpl-2.0 | 5,144,483,261,318,524,000 | 27.4 | 75 | 0.566515 | false |
guewen/OpenUpgrade | addons/crm_project_issue/project_issue.py | 380 | 2373 |
from openerp.osv import osv, fields
class crm_lead_to_project_issue_wizard(osv.TransientModel):
""" wizard to convert a Lead into a Project Issue and move the Mail Thread """
_name = "crm.lead2projectissue.wizard"
_inherit = 'crm.partner.binding'
_columns = {
"lead_id": fields.many2one("crm.lead", "Lead", domain=[("type", "=", "lead")]),
"project_id": fields.many2one("project.project", "Project", domain=[("use_issues", "=", True)])
}
_defaults = {
"lead_id": lambda self, cr, uid, context=None: context.get('active_id')
}
def action_lead_to_project_issue(self, cr, uid, ids, context=None):
# get the wizards and models
wizards = self.browse(cr, uid, ids, context=context)
Lead = self.pool["crm.lead"]
Issue = self.pool["project.issue"]
for wizard in wizards:
# get the lead to transform
lead = wizard.lead_id
partner = self._find_matching_partner(cr, uid, context=context)
if not partner and (lead.partner_name or lead.contact_name):
partner_ids = Lead.handle_partner_assignation(cr, uid, [lead.id], context=context)
partner = partner_ids[lead.id]
# create new project.issue
vals = {
"name": lead.name,
"description": lead.description,
"email_from": lead.email_from,
"project_id": wizard.project_id.id,
"partner_id": partner,
"user_id": None
}
issue_id = Issue.create(cr, uid, vals, context=None)
# move the mail thread
Lead.message_change_thread(cr, uid, lead.id, issue_id, "project.issue", context=context)
# delete the lead
Lead.unlink(cr, uid, [lead.id], context=None)
# return the action to go to the form view of the new Issue
view_id = self.pool.get('ir.ui.view').search(cr, uid, [('model', '=', 'project.issue'), ('name', '=', 'project_issue_form_view')])
return {
'name': 'Issue created',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'res_model': 'project.issue',
'type': 'ir.actions.act_window',
'res_id': issue_id,
'context': context
}
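    # Hedged usage sketch, not part of the module: the wizard is normally
    # launched from a lead, which is roughly equivalent to
    #   Wizard = self.pool['crm.lead2projectissue.wizard']
    #   wiz_id = Wizard.create(cr, uid, {'project_id': project_id},
    #                          context={'active_id': lead_id})
    #   Wizard.action_lead_to_project_issue(cr, uid, [wiz_id],
    #                                       context={'active_id': lead_id})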
| agpl-3.0 | -7,116,647,927,106,207,000 | 39.220339 | 138 | 0.550358 | false |
gingerswede/ITSecCardGame | src/View/Game.py | 1 | 2031 | '''
IDE: Eclipse (PyDev)
Python version: 2.7
Operating system: Windows 8.1
@author: Emil Carlsson
@copyright: 2015 Emil Carlsson
@license: This program is distributed under the terms of the GNU General Public License
'''
from View import GlobalFunc
from View.Board import Board
class GameView(object):
__root = None
__controller = None
__boardView = None
def __init__(self, root, gameController, *args, **kwargs):
self.__root = root
self.__controller = gameController
def StartNewGame(self, player=None, opponent=None):
GlobalFunc.RemoveAllChildren(self.__root)
self.__boardView = Board(self.__root, self.__controller, player, opponent)
def OutOfMoves(self):
self.__boardView.AddInformation("You are out of moves.\nPlease finnish your turn.")
def ResetInformation(self):
self.__boardView.ResetInformation()
def MaxHandSize(self):
self.__boardView.AddInformation("Maximum hand size reached.\nPlease play a card if possible.")
def MaxVisibleHandSize(self):
self.__boardView.AddInformation("Maximum amount of visible cards reached.")
def RefreshBoard(self, playerOne, playerTwo):
self.__boardView.RefreshBoard(playerOne, playerTwo)
def RemoveFrame(self, frame):
frame.destroy()
def PlayerLost(self):
self.__boardView.AddInformation("You lost!\nGame Over!")
def PlayerWon(self):
self.__boardView.AddInformation("You won!\nGame Over!")
def OutOfCards(self):
self.__boardView.AddInformation("You are out of cards in your deck.")
def CardNotInHand(self):
self.__boardView.AddInformation("Card not on your hand.")
#TODO: Does not show
def WaitingForOpponent(self):
self.__boardView.AddInformation("Waiting for opponent...")
def AppendMessage(self, message):
        self.__boardView.AppendInformation(message)
| gpl-3.0 | 7,349,038,575,419,678,000 | 31.253968 | 102 | 0.64451 | false |
jimarnold/gomatic | gomatic/go_cd_configurator.py | 1 | 10148 | #!/usr/bin/env python
import json
import time
import xml.etree.ElementTree as ET
import argparse
import sys
import subprocess
import requests
from decimal import Decimal
from gomatic.gocd.pipelines import Pipeline, PipelineGroup
from gomatic.gocd.agents import Agent
from gomatic.xml_operations import Ensurance, PossiblyMissingElement, move_all_to_end, prettify
class GoCdConfigurator(object):
def __init__(self, host_rest_client):
self.__host_rest_client = host_rest_client
self.__set_initial_config_xml()
def __set_initial_config_xml(self):
self.__initial_config, self._initial_md5 = self.__current_config_response()
self.__xml_root = ET.fromstring(self.__initial_config)
def __repr__(self):
return "GoCdConfigurator(%s)" % self.__host_rest_client
def as_python(self, pipeline, with_save=True):
result = "#!/usr/bin/env python\nfrom gomatic import *\n\nconfigurator = " + str(self) + "\n"
result += "pipeline = configurator"
result += pipeline.as_python_commands_applied_to_server()
save_part = ""
if with_save:
save_part = "\n\nconfigurator.save_updated_config(save_config_locally=True, dry_run=True)"
return result + save_part
@property
def current_config(self):
return self.__current_config_response()[0]
def __current_config_response(self):
config_url = "/go/admin/restful/configuration/file/GET/xml"
response = self.__host_rest_client.get(config_url)
if response.status_code != 200:
raise Exception("Failed to get {} status {}\n:{}".format(config_url, response.status_code, response.text))
return response.text, response.headers['x-cruise-config-md5']
def reorder_elements_to_please_go(self):
move_all_to_end(self.__xml_root, 'pipelines')
move_all_to_end(self.__xml_root, 'templates')
move_all_to_end(self.__xml_root, 'environments')
move_all_to_end(self.__xml_root, 'agents')
for pipeline in self.pipelines:
pipeline.reorder_elements_to_please_go()
for template in self.templates:
template.reorder_elements_to_please_go()
@property
def config(self):
self.reorder_elements_to_please_go()
return ET.tostring(self.__xml_root, 'utf-8')
@property
def artifacts_dir(self):
return self.__possibly_missing_server_element().attribute('artifactsdir')
@artifacts_dir.setter
def artifacts_dir(self, artifacts_dir):
self.__server_element_ensurance().set('artifactsdir', artifacts_dir)
@property
def site_url(self):
return self.__possibly_missing_server_element().attribute('siteUrl')
@site_url.setter
def site_url(self, site_url):
self.__server_element_ensurance().set('siteUrl', site_url)
@property
def agent_auto_register_key(self):
return self.__possibly_missing_server_element().attribute('agentAutoRegisterKey')
@agent_auto_register_key.setter
def agent_auto_register_key(self, agent_auto_register_key):
self.__server_element_ensurance().set('agentAutoRegisterKey', agent_auto_register_key)
@property
def purge_start(self):
return self.__server_decimal_attribute('purgeStart')
@purge_start.setter
def purge_start(self, purge_start_decimal):
assert isinstance(purge_start_decimal, Decimal)
self.__server_element_ensurance().set('purgeStart', str(purge_start_decimal))
@property
def purge_upto(self):
return self.__server_decimal_attribute('purgeUpto')
@purge_upto.setter
def purge_upto(self, purge_upto_decimal):
assert isinstance(purge_upto_decimal, Decimal)
self.__server_element_ensurance().set('purgeUpto', str(purge_upto_decimal))
def __server_decimal_attribute(self, attribute_name):
attribute = self.__possibly_missing_server_element().attribute(attribute_name)
return Decimal(attribute) if attribute else None
def __possibly_missing_server_element(self):
return PossiblyMissingElement(self.__xml_root).possibly_missing_child('server')
def __server_element_ensurance(self):
return Ensurance(self.__xml_root).ensure_child('server')
@property
def pipeline_groups(self):
return [PipelineGroup(e, self) for e in self.__xml_root.findall('pipelines')]
def ensure_pipeline_group(self, group_name):
pipeline_group_element = Ensurance(self.__xml_root).ensure_child_with_attribute("pipelines", "group", group_name)
return PipelineGroup(pipeline_group_element.element, self)
def ensure_removal_of_pipeline_group(self, group_name):
matching = [g for g in self.pipeline_groups if g.name == group_name]
for group in matching:
self.__xml_root.remove(group.element)
return self
def remove_all_pipeline_groups(self):
for e in self.__xml_root.findall('pipelines'):
self.__xml_root.remove(e)
return self
@property
def agents(self):
return [Agent(e) for e in PossiblyMissingElement(self.__xml_root).possibly_missing_child('agents').findall('agent')]
def ensure_removal_of_agent(self, hostname):
matching = [agent for agent in self.agents if agent.hostname == hostname]
for agent in matching:
Ensurance(self.__xml_root).ensure_child('agents').element.remove(agent._element)
return self
@property
def pipelines(self):
result = []
groups = self.pipeline_groups
for group in groups:
result.extend(group.pipelines)
return result
@property
def templates(self):
return [Pipeline(e, 'templates') for e in PossiblyMissingElement(self.__xml_root).possibly_missing_child('templates').findall('pipeline')]
def ensure_template(self, template_name):
pipeline_element = Ensurance(self.__xml_root).ensure_child('templates').ensure_child_with_attribute('pipeline', 'name', template_name).element
return Pipeline(pipeline_element, 'templates')
def ensure_replacement_of_template(self, template_name):
template = self.ensure_template(template_name)
template.make_empty()
return template
def ensure_removal_of_template(self, template_name):
matching = [template for template in self.templates if template.name == template_name]
root = Ensurance(self.__xml_root)
templates_element = root.ensure_child('templates').element
for template in matching:
templates_element.remove(template.element)
if len(self.templates) == 0:
root.element.remove(templates_element)
return self
@property
def git_urls(self):
return [pipeline.git_url for pipeline in self.pipelines if pipeline.has_single_git_material]
@property
def has_changes(self):
return prettify(self.__initial_config) != prettify(self.config)
def save_updated_config(self, save_config_locally=False, dry_run=False):
config_before = prettify(self.__initial_config)
config_after = prettify(self.config)
if save_config_locally:
open('config-before.xml', 'w').write(config_before.encode('utf-8'))
open('config-after.xml', 'w').write(config_after.encode('utf-8'))
def has_kdiff3():
try:
return subprocess.call(["kdiff3", "-version"]) == 0
except:
return False
if dry_run and config_before != config_after and has_kdiff3():
subprocess.call(["kdiff3", "config-before.xml", "config-after.xml"])
data = {
'xmlFile': self.config,
'md5': self._initial_md5
}
if not dry_run and config_before != config_after:
self.__host_rest_client.post('/go/admin/restful/configuration/file/POST/xml', data)
self.__set_initial_config_xml()
class HostRestClient(object):
def __init__(self, host):
self.__host = host
def __repr__(self):
return 'HostRestClient("%s")' % self.__host
def __path(self, path):
return ('http://%s' % self.__host) + path
def get(self, path):
result = requests.get(self.__path(path))
count = 0
while ((result.status_code == 503) or (result.status_code == 504)) and (count < 5):
result = requests.get(self.__path(path))
time.sleep(1)
count += 1
return result
def post(self, path, data):
url = self.__path(path)
result = requests.post(url, data)
if result.status_code != 200:
try:
result_json = json.loads(result.text.replace("\\'", "'"))
message = result_json.get('result', result.text)
raise RuntimeError("Could not post config to Go server (%s) [status code=%s]:\n%s" % (url, result.status_code, message))
except ValueError:
raise RuntimeError("Could not post config to Go server (%s) [status code=%s] (and result was not json):\n%s" % (url, result.status_code, result))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Gomatic is an API for configuring GoCD. '
'Run python -m gomatic.go_cd_configurator to reverse engineer code to configure an existing pipeline.')
parser.add_argument('-s', '--server', help='the go server (e.g. "localhost:8153" or "my.gocd.com")')
parser.add_argument('-p', '--pipeline', help='the name of the pipeline to reverse-engineer the config for')
args = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
go_server = GoCdConfigurator(HostRestClient(args.server))
matching_pipelines = [p for p in go_server.pipelines if p.name == args.pipeline]
if len(matching_pipelines) != 1:
raise RuntimeError("Should have found one matching pipeline but found %s" % matching_pipelines)
pipeline = matching_pipelines[0]
print(go_server.as_python(pipeline))
| mit | 6,057,670,478,306,261,000 | 37.732824 | 161 | 0.640323 | false |
hynnet/hiwifi-openwrt-HC5661-HC5761 | staging_dir/host/lib/python2.7/test/test_base64.py | 113 | 8195 | import unittest
from test import test_support
import base64
class LegacyBase64TestCase(unittest.TestCase):
def test_encodestring(self):
eq = self.assertEqual
eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n")
eq(base64.encodestring("a"), "YQ==\n")
eq(base64.encodestring("ab"), "YWI=\n")
eq(base64.encodestring("abc"), "YWJj\n")
eq(base64.encodestring(""), "")
eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}"),
"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")
def test_decodestring(self):
eq = self.assertEqual
eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org")
eq(base64.decodestring("YQ==\n"), "a")
eq(base64.decodestring("YWI=\n"), "ab")
eq(base64.decodestring("YWJj\n"), "abc")
eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"),
"abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}")
eq(base64.decodestring(''), '')
def test_encode(self):
eq = self.assertEqual
from cStringIO import StringIO
infp = StringIO('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789!@#0^&*();:<>,. []{}')
outfp = StringIO()
base64.encode(infp, outfp)
eq(outfp.getvalue(),
'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE'
'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT'
'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n')
def test_decode(self):
from cStringIO import StringIO
infp = StringIO('d3d3LnB5dGhvbi5vcmc=')
outfp = StringIO()
base64.decode(infp, outfp)
self.assertEqual(outfp.getvalue(), 'www.python.org')
class BaseXYTestCase(unittest.TestCase):
def test_b64encode(self):
eq = self.assertEqual
# Test default alphabet
eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=")
eq(base64.b64encode('\x00'), 'AA==')
eq(base64.b64encode("a"), "YQ==")
eq(base64.b64encode("ab"), "YWI=")
eq(base64.b64encode("abc"), "YWJj")
eq(base64.b64encode(""), "")
eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}"),
"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
# Test with arbitrary alternative characters
eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd')
# Test standard alphabet
eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=")
eq(base64.standard_b64encode("a"), "YQ==")
eq(base64.standard_b64encode("ab"), "YWI=")
eq(base64.standard_b64encode("abc"), "YWJj")
eq(base64.standard_b64encode(""), "")
eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}"),
"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
# Test with 'URL safe' alternative characters
eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd')
def test_b64decode(self):
eq = self.assertEqual
eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org")
eq(base64.b64decode('AA=='), '\x00')
eq(base64.b64decode("YQ=="), "a")
eq(base64.b64decode("YWI="), "ab")
eq(base64.b64decode("YWJj"), "abc")
eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
"abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}")
eq(base64.b64decode(''), '')
# Test with arbitrary alternative characters
eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d')
# Test standard alphabet
eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org")
eq(base64.standard_b64decode("YQ=="), "a")
eq(base64.standard_b64decode("YWI="), "ab")
eq(base64.standard_b64decode("YWJj"), "abc")
eq(base64.standard_b64decode(""), "")
eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
"abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"0123456789!@#0^&*();:<>,. []{}")
# Test with 'URL safe' alternative characters
eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d')
def test_b64decode_error(self):
self.assertRaises(TypeError, base64.b64decode, 'abc')
def test_b32encode(self):
eq = self.assertEqual
eq(base64.b32encode(''), '')
eq(base64.b32encode('\x00'), 'AA======')
eq(base64.b32encode('a'), 'ME======')
eq(base64.b32encode('ab'), 'MFRA====')
eq(base64.b32encode('abc'), 'MFRGG===')
eq(base64.b32encode('abcd'), 'MFRGGZA=')
eq(base64.b32encode('abcde'), 'MFRGGZDF')
def test_b32decode(self):
eq = self.assertEqual
eq(base64.b32decode(''), '')
eq(base64.b32decode('AA======'), '\x00')
eq(base64.b32decode('ME======'), 'a')
eq(base64.b32decode('MFRA===='), 'ab')
eq(base64.b32decode('MFRGG==='), 'abc')
eq(base64.b32decode('MFRGGZA='), 'abcd')
eq(base64.b32decode('MFRGGZDF'), 'abcde')
def test_b32decode_casefold(self):
eq = self.assertEqual
eq(base64.b32decode('', True), '')
eq(base64.b32decode('ME======', True), 'a')
eq(base64.b32decode('MFRA====', True), 'ab')
eq(base64.b32decode('MFRGG===', True), 'abc')
eq(base64.b32decode('MFRGGZA=', True), 'abcd')
eq(base64.b32decode('MFRGGZDF', True), 'abcde')
# Lower cases
eq(base64.b32decode('me======', True), 'a')
eq(base64.b32decode('mfra====', True), 'ab')
eq(base64.b32decode('mfrgg===', True), 'abc')
eq(base64.b32decode('mfrggza=', True), 'abcd')
eq(base64.b32decode('mfrggzdf', True), 'abcde')
# Expected exceptions
self.assertRaises(TypeError, base64.b32decode, 'me======')
# Mapping zero and one
eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe')
eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe')
eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe')
def test_b32decode_error(self):
self.assertRaises(TypeError, base64.b32decode, 'abc')
self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==')
def test_b16encode(self):
eq = self.assertEqual
eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF')
eq(base64.b16encode('\x00'), '00')
def test_b16decode(self):
eq = self.assertEqual
eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef')
eq(base64.b16decode('00'), '\x00')
# Lower case is not allowed without a flag
self.assertRaises(TypeError, base64.b16decode, '0102abcdef')
# Case fold
eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef')
def test_main():
test_support.run_unittest(__name__)
if __name__ == '__main__':
test_main()
| gpl-2.0 | 7,345,585,698,987,152,000 | 41.46114 | 79 | 0.582184 | false |
uzh/msregistry | app/main/views.py | 1 | 1040 | # Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <[email protected]>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from . import main
@main.route('/', methods=['GET', 'POST'])
def index():
return '', 200
| agpl-3.0 | -3,505,700,021,584,941,600 | 33.666667 | 71 | 0.736538 | false |
suyashphadtare/sajil-final-erp | erpnext/erpnext/projects/utils.py | 37 | 1205 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
@frappe.whitelist()
def get_time_log_list(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.get_values("Time Log", filters, ["name", "activity_type", "owner"])
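# Hedged illustration, not part of the module: Frappe calls these whitelisted
# functions to populate link-field searches. A hypothetical call such as
#   query_task('Task', 'deploy', 'name', 0, 20, {})
# returns (name, subject) pairs, ranking subject/name prefix matches first.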
@frappe.whitelist()
def query_task(doctype, txt, searchfield, start, page_len, filters):
from frappe.widgets.reportview import build_match_conditions
search_string = "%%%s%%" % txt
order_by_string = "%s%%" % txt
match_conditions = build_match_conditions("Task")
match_conditions = ("and" + match_conditions) if match_conditions else ""
return frappe.db.sql("""select name, subject from `tabTask`
where (`%s` like %s or `subject` like %s) %s
order by
case when `subject` like %s then 0 else 1 end,
case when `%s` like %s then 0 else 1 end,
`%s`,
subject
limit %s, %s""" %
(searchfield, "%s", "%s", match_conditions, "%s",
searchfield, "%s", searchfield, "%s", "%s"),
		(search_string, search_string, order_by_string, order_by_string, start, page_len))
| agpl-3.0 | -611,594,828,541,197,200 | 36.6875 | 85 | 0.687137 | false |
romain-li/edx-platform | lms/envs/test_static_optimized.py | 26 | 2169 | """
Settings used when generating static assets for use in tests.
For example, Bok Choy uses two different settings files:
1. test_static_optimized is used when invoking collectstatic
2. bok_choy is used when running CMS and LMS
Note: it isn't possible to have a single settings file, because Django doesn't
support both generating static assets to a directory and also serving static
from the same directory.
"""
# Start with the common settings
from .common import * # pylint: disable=wildcard-import, unused-wildcard-import
# Use an in-memory database since this settings file is only used for updating assets
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
# Provide a dummy XQUEUE_INTERFACE setting as LMS expects it to exist on start up
XQUEUE_INTERFACE = {
"url": "https://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
######################### PIPELINE ####################################
# Use RequireJS optimized storage
STATICFILES_STORAGE = 'openedx.core.lib.django_require.staticstorage.OptimizedCachedRequireJsStorage'
# Revert to the default set of finders as we don't want to dynamically pick up files from the pipeline
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'openedx.core.lib.xblock_pipeline.finder.XBlockPipelineFinder',
]
# Redirect to the test_root folder within the repo
TEST_ROOT = REPO_ROOT / "test_root"
LOG_DIR = (TEST_ROOT / "log").abspath()
# Store the static files under test root so that they don't overwrite existing static assets
STATIC_ROOT = (TEST_ROOT / "staticfiles" / "lms").abspath()
# Disable uglify when tests are running (used by build.js).
# 1. Uglify is by far the slowest part of the build process
# 2. Having full source code makes debugging tests easier for developers
os.environ['REQUIRE_BUILD_PROFILE_OPTIMIZE'] = 'none'
| agpl-3.0 | 7,274,013,550,475,303,000 | 35.15 | 102 | 0.705394 | false |
thaumos/ansible | lib/ansible/modules/cloud/azure/azure_rm_hdinsightcluster.py | 12 | 20099 | #!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_hdinsightcluster
version_added: "2.8"
short_description: Manage Azure HDInsight Cluster instance.
description:
- Create, update and delete instance of Azure HDInsight Cluster.
options:
resource_group:
description:
- The name of the resource group.
required: True
name:
description:
- The name of the cluster.
required: True
location:
description:
- Resource location. If not set, location from the resource group will be used as default.
cluster_version:
description:
            - The version of the cluster, for example I(3.6).
os_type:
description:
- The type of operating system.
choices:
- 'linux'
tier:
description:
- The cluster tier.
choices:
- 'standard'
- 'premium'
cluster_definition:
description:
- The cluster definition.
suboptions:
kind:
description:
- The type of cluster.
choices:
- hadoop
- spark
- hbase
- storm
gateway_rest_username:
description:
- Gateway REST user name.
gateway_rest_password:
description:
- Gateway REST password.
compute_profile_roles:
description:
- The list of roles in the cluster.
type: list
suboptions:
name:
description:
- The name of the role.
choices:
- 'headnode'
- 'workernode'
- 'zookepernode'
min_instance_count:
description:
- The minimum instance count of the cluster.
target_instance_count:
description:
- The instance count of the cluster.
vm_size:
description:
                    - The size of the VM.
linux_profile:
description:
- The Linux OS profile.
suboptions:
username:
description:
- User name
password:
description:
- Password
storage_accounts:
description:
- The list of storage accounts in the cluster.
type: list
suboptions:
name:
description:
- Blob storage endpoint.
is_default:
description:
- Whether or not the storage account is the default storage account.
container:
description:
- The container in the storage account.
key:
description:
- The storage account access key.
state:
description:
- Assert the state of the cluster.
- Use C(present) to create or update a cluster and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Create instance of HDInsight Cluster
azure_rm_hdinsightcluster:
resource_group: myResourceGroup
name: myCluster
location: eastus2
cluster_version: 3.6
os_type: linux
tier: standard
cluster_definition:
kind: spark
gateway_rest_username: http-user
gateway_rest_password: MuABCPassword!!@123
storage_accounts:
- name: myStorageAccount.blob.core.windows.net
is_default: yes
container: myContainer
key: GExmaxH4lDNdHA9nwAsCt8t4AOQas2y9vXQP1kKALTram7Q3/5xLVIab3+nYG1x63Xyak9/VXxQyNBHA9pDWw==
compute_profile_roles:
- name: headnode
target_instance_count: 2
        vm_size: Standard_D3
linux_profile:
username: sshuser
password: MuABCPassword!!@123
- name: workernode
target_instance_count: 2
vm_size: Standard_D3
linux_profile:
username: sshuser
password: MuABCPassword!!@123
'''
RETURN = '''
id:
description:
- Fully qualified resource id of the cluster.
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.HDInsight/clusters/myCluster
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from msrestazure.azure_operation import AzureOperationPoller
from azure.mgmt.hdinsight import HDInsightManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMClusters(AzureRMModuleBase):
"""Configuration class for an Azure RM Cluster resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
location=dict(
type='str'
),
cluster_version=dict(
type='str'
),
os_type=dict(
type='str',
choices=['linux']
),
tier=dict(
type='str',
choices=['standard',
'premium']
),
cluster_definition=dict(
type='dict'
),
compute_profile_roles=dict(
type='list'
),
storage_accounts=dict(
type='list'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.name = None
self.parameters = dict()
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.to_do = Actions.NoAction
self.tags_changed = False
self.new_instance_count = None
super(AzureRMClusters, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
self.parameters[key] = kwargs[key]
dict_expand(self.parameters, ['cluster_version'], 'properties')
dict_camelize(self.parameters, ['os_type'], True)
dict_expand(self.parameters, ['os_type'], 'properties')
dict_camelize(self.parameters, ['tier'], True)
dict_expand(self.parameters, ['tier'], 'properties')
dict_rename(self.parameters, ['cluster_definition', 'gateway_rest_username'], 'restAuthCredential.username')
dict_rename(self.parameters, ['cluster_definition', 'gateway_rest_password'], 'restAuthCredential.password')
dict_expand(self.parameters, ['cluster_definition', 'restAuthCredential.username'], 'gateway')
dict_expand(self.parameters, ['cluster_definition', 'restAuthCredential.password'], 'gateway')
dict_expand(self.parameters, ['cluster_definition', 'gateway'], 'configurations')
dict_expand(self.parameters, ['cluster_definition'], 'properties')
dict_expand(self.parameters, ['compute_profile_roles', 'vm_size'], 'hardware_profile')
dict_rename(self.parameters, ['compute_profile_roles', 'linux_profile'], 'linux_operating_system_profile')
dict_expand(self.parameters, ['compute_profile_roles', 'linux_operating_system_profile'], 'os_profile')
dict_rename(self.parameters, ['compute_profile_roles'], 'roles')
dict_expand(self.parameters, ['roles'], 'compute_profile')
dict_expand(self.parameters, ['compute_profile'], 'properties')
dict_rename(self.parameters, ['storage_accounts'], 'storageaccounts')
dict_expand(self.parameters, ['storageaccounts'], 'storage_profile')
dict_expand(self.parameters, ['storage_profile'], 'properties')
response = None
self.mgmt_client = self.get_mgmt_svc_client(HDInsightManagementClient,
base_url=self._cloud_environment.endpoints.resource_manager)
resource_group = self.get_resource_group(self.resource_group)
if "location" not in self.parameters:
self.parameters["location"] = resource_group.location
old_response = self.get_cluster()
if not old_response:
self.log("Cluster instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("Cluster instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
compare_result = {}
if (not default_compare(self.parameters, old_response, '', compare_result)):
if compare_result.pop('/properties/compute_profile/roles/*/target_instance_count', False):
# check if it's workernode
new_count = 0
old_count = 0
for role in self.parameters['properties']['compute_profile']['roles']:
if role['name'] == 'workernode':
new_count = role['target_instance_count']
for role in old_response['properties']['compute_profile']['roles']:
if role['name'] == 'workernode':
old_count = role['target_instance_count']
if old_count != new_count:
self.new_instance_count = new_count
self.to_do = Actions.Update
if compare_result.pop('/tags', False):
self.to_do = Actions.Update
self.tags_changed = True
if compare_result:
for k in compare_result.keys():
self.module.warn("property '" + k + "' cannot be updated (" + compare_result[k] + ")")
self.module.warn("only tags and target_instance_count can be updated")
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the Cluster instance")
self.results['changed'] = True
if self.check_mode:
return self.results
response = self.create_update_cluster()
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("Cluster instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_cluster()
else:
self.log("Cluster instance unchanged")
self.results['changed'] = False
response = old_response
if self.state == 'present':
self.results.update(self.format_item(response))
return self.results
def create_update_cluster(self):
'''
Creates or updates Cluster with the specified configuration.
:return: deserialized Cluster instance state dictionary
'''
self.log("Creating / Updating the Cluster instance {0}".format(self.name))
try:
if self.to_do == Actions.Create:
response = self.mgmt_client.clusters.create(resource_group_name=self.resource_group,
cluster_name=self.name,
parameters=self.parameters)
if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller):
response = self.get_poller_result(response)
else:
if self.tags_changed:
response = self.mgmt_client.clusters.update(resource_group_name=self.resource_group,
cluster_name=self.name,
tags=self.parameters.get('tags'))
if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller):
response = self.get_poller_result(response)
if self.new_instance_count:
response = self.mgmt_client.clusters.resize(resource_group_name=self.resource_group,
cluster_name=self.name,
target_instance_count=self.new_instance_count)
if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.fail("Error creating or updating Cluster instance: {0}".format(str(exc)))
return response.as_dict() if response else {}
def delete_cluster(self):
'''
Deletes specified Cluster instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the Cluster instance {0}".format(self.name))
try:
response = self.mgmt_client.clusters.delete(resource_group_name=self.resource_group,
cluster_name=self.name)
except CloudError as e:
self.fail("Error deleting the Cluster instance: {0}".format(str(e)))
return True
def get_cluster(self):
'''
Gets the properties of the specified Cluster.
:return: deserialized Cluster instance state dictionary
'''
self.log("Checking if the Cluster instance {0} is present".format(self.name))
found = False
try:
response = self.mgmt_client.clusters.get(resource_group_name=self.resource_group,
cluster_name=self.name)
found = True
self.log("Response : {0}".format(response))
self.log("Cluster instance : {0} found".format(response.name))
except Exception as e:
self.log('Did not find the Cluster instance.')
if found is True:
return response.as_dict()
return False
def format_item(self, d):
d = {
'id': d.get('id', None)
}
return d
def default_compare(new, old, path, result):
if new is None:
match = True
elif isinstance(new, dict):
match = True
if not isinstance(old, dict):
result[path] = 'old dict is null'
match = False
else:
for k in new.keys():
if not default_compare(new.get(k), old.get(k, None), path + '/' + k, result):
match = False
elif isinstance(new, list):
if not isinstance(old, list) or len(new) != len(old):
result[path] = 'length is different or null'
match = False
elif len(old) == 0:
match = True
else:
match = True
if isinstance(old[0], dict):
key = None
if 'id' in old[0] and 'id' in new[0]:
key = 'id'
elif 'name' in old[0] and 'name' in new[0]:
key = 'name'
else:
key = list(old[0])[0]
new = sorted(new, key=lambda x: x.get(key, ''))
old = sorted(old, key=lambda x: x.get(key, ''))
else:
new = sorted(new)
old = sorted(old)
for i in range(len(new)):
if not default_compare(new[i], old[i], path + '/*', result):
match = False
return match
else:
if path.endswith('password'):
match = True
else:
if path == '/location' or path.endswith('location_name'):
new = new.replace(' ', '').lower()
old = new.replace(' ', '').lower()
if new == old:
match = True
else:
result[path] = str(new) + ' != ' + str(old)
match = False
return match
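# Illustrative sketch, not part of the module: default_compare walks the
# requested vs. deployed cluster dicts and records every mismatching path, e.g.
#   result = {}
#   default_compare({'tags': {'env': 'dev'}}, {'tags': {'env': 'qa'}}, '', result)
#   # result -> {'/tags/env': 'dev != qa'}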
def dict_camelize(d, path, camelize_first):
if isinstance(d, list):
for i in range(len(d)):
dict_camelize(d[i], path, camelize_first)
elif isinstance(d, dict):
if len(path) == 1:
old_value = d.get(path[0], None)
if old_value is not None:
d[path[0]] = _snake_to_camel(old_value, camelize_first)
else:
sd = d.get(path[0], None)
if sd is not None:
dict_camelize(sd, path[1:], camelize_first)
def dict_upper(d, path):
if isinstance(d, list):
for i in range(len(d)):
dict_upper(d[i], path)
elif isinstance(d, dict):
if len(path) == 1:
old_value = d.get(path[0], None)
if old_value is not None:
d[path[0]] = old_value.upper()
else:
sd = d.get(path[0], None)
if sd is not None:
dict_upper(sd, path[1:])
def dict_rename(d, path, new_name):
if isinstance(d, list):
for i in range(len(d)):
dict_rename(d[i], path, new_name)
elif isinstance(d, dict):
if len(path) == 1:
old_value = d.pop(path[0], None)
if old_value is not None:
d[new_name] = old_value
else:
sd = d.get(path[0], None)
if sd is not None:
dict_rename(sd, path[1:], new_name)
def dict_expand(d, path, outer_dict_name):
if isinstance(d, list):
for i in range(len(d)):
dict_expand(d[i], path, outer_dict_name)
elif isinstance(d, dict):
if len(path) == 1:
old_value = d.pop(path[0], None)
if old_value is not None:
d[outer_dict_name] = d.get(outer_dict_name, {})
d[outer_dict_name][path[0]] = old_value
else:
sd = d.get(path[0], None)
if sd is not None:
dict_expand(sd, path[1:], outer_dict_name)
def _snake_to_camel(snake, capitalize_first=False):
if capitalize_first:
return ''.join(x.capitalize() or '_' for x in snake.split('_'))
else:
return snake.split('_')[0] + ''.join(x.capitalize() or '_' for x in snake.split('_')[1:])
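# --- illustrative usage (added; not part of the original module) ---
# A minimal sketch of the dict_* helpers above; the input values are invented:
#
#     d = {'sku': {'tier_name': 'basic'}}
#     dict_camelize(d, ['sku', 'tier_name'], True)  # value becomes 'Basic'
#     dict_rename(d, ['sku', 'tier_name'], 'tier')  # key becomes 'tier'
#     dict_expand(d, ['sku'], 'properties')         # {'properties': {'sku': ...}}
#     _snake_to_camel('virtual_machine', capitalize_first=True)  # 'VirtualMachine'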
def main():
"""Main execution"""
AzureRMClusters()
if __name__ == '__main__':
main()
| gpl-3.0 | -3,524,846,650,251,953,700 | 35.214414 | 143 | 0.524603 | false |
edxzw/edx-platform | cms/djangoapps/contentstore/views/tests/test_programs.py | 19 | 6364 | """Tests covering the Programs listing on the Studio home."""
import json
from django.conf import settings
from django.core.urlresolvers import reverse
import httpretty
import mock
from oauth2_provider.tests.factories import ClientFactory
from provider.constants import CONFIDENTIAL
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin, ProgramsDataMixin
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
class TestProgramListing(ProgramsApiConfigMixin, ProgramsDataMixin, SharedModuleStoreTestCase):
"""Verify Program listing behavior."""
def setUp(self):
super(TestProgramListing, self).setUp()
ClientFactory(name=ProgramsApiConfig.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL)
self.staff = UserFactory(is_staff=True)
self.client.login(username=self.staff.username, password='test')
self.studio_home = reverse('home')
@httpretty.activate
def test_programs_config_disabled(self):
"""Verify that the programs tab and creation button aren't rendered when config is disabled."""
self.create_config(enable_studio_tab=False)
self.mock_programs_api()
response = self.client.get(self.studio_home)
self.assertNotIn("You haven't created any programs yet.", response.content)
for program_name in self.PROGRAM_NAMES:
self.assertNotIn(program_name, response.content)
@httpretty.activate
def test_programs_requires_staff(self):
"""
Verify that the programs tab and creation button aren't rendered unless the user has
global staff permissions.
"""
student = UserFactory(is_staff=False)
self.client.login(username=student.username, password='test')
self.create_config()
self.mock_programs_api()
response = self.client.get(self.studio_home)
self.assertNotIn("You haven't created any programs yet.", response.content)
@httpretty.activate
def test_programs_displayed(self):
"""Verify that the programs tab and creation button can be rendered when config is enabled."""
self.create_config()
# When no data is provided, expect creation prompt.
self.mock_programs_api(data={'results': []})
response = self.client.get(self.studio_home)
self.assertIn("You haven't created any programs yet.", response.content)
# When data is provided, expect a program listing.
self.mock_programs_api()
response = self.client.get(self.studio_home)
for program_name in self.PROGRAM_NAMES:
self.assertIn(program_name, response.content)
class TestProgramAuthoringView(ProgramsApiConfigMixin, SharedModuleStoreTestCase):
"""Verify the behavior of the program authoring app's host view."""
def setUp(self):
super(TestProgramAuthoringView, self).setUp()
self.staff = UserFactory(is_staff=True)
self.programs_path = reverse('programs')
def _assert_status(self, status_code):
"""Verify the status code returned by the Program authoring view."""
response = self.client.get(self.programs_path)
        self.assertEqual(response.status_code, status_code)
return response
def test_authoring_login_required(self):
"""Verify that accessing the view requires the user to be authenticated."""
response = self.client.get(self.programs_path)
self.assertRedirects(
response,
'{login_url}?next={programs}'.format(
login_url=settings.LOGIN_URL,
programs=self.programs_path
)
)
def test_authoring_header(self):
"""Verify that the header contains the expected text."""
self.client.login(username=self.staff.username, password='test')
self.create_config()
response = self._assert_status(200)
self.assertIn("Program Administration", response.content)
def test_authoring_access(self):
"""
Verify that a 404 is returned if Programs authoring is disabled, or the user does not have
global staff permissions.
"""
self.client.login(username=self.staff.username, password='test')
self._assert_status(404)
# Enable Programs authoring interface
self.create_config()
student = UserFactory(is_staff=False)
self.client.login(username=student.username, password='test')
self._assert_status(404)
class TestProgramsIdTokenView(ProgramsApiConfigMixin, SharedModuleStoreTestCase):
"""Tests for the programs id_token endpoint."""
def setUp(self):
super(TestProgramsIdTokenView, self).setUp()
self.user = UserFactory()
self.client.login(username=self.user.username, password='test')
self.path = reverse('programs_id_token')
def test_config_disabled(self):
"""Ensure the endpoint returns 404 when Programs authoring is disabled."""
self.create_config(enable_studio_tab=False)
response = self.client.get(self.path)
self.assertEqual(response.status_code, 404)
def test_not_logged_in(self):
"""Ensure the endpoint denies access to unauthenticated users."""
self.create_config()
self.client.logout()
response = self.client.get(self.path)
self.assertEqual(response.status_code, 302)
self.assertIn(settings.LOGIN_URL, response['Location'])
@mock.patch('cms.djangoapps.contentstore.views.program.get_id_token', return_value='test-id-token')
def test_config_enabled(self, mock_get_id_token):
"""
Ensure the endpoint responds with a valid JSON payload when authoring
is enabled.
"""
self.create_config()
response = self.client.get(self.path)
self.assertEqual(response.status_code, 200)
payload = json.loads(response.content)
self.assertEqual(payload, {"id_token": "test-id-token"})
# this comparison is a little long-handed because we need to compare user instances directly
user, client_name = mock_get_id_token.call_args[0]
self.assertEqual(user, self.user)
self.assertEqual(client_name, "programs")
| agpl-3.0 | -4,329,714,381,491,999,000 | 38.042945 | 103 | 0.684161 | false |
2mny/mylar | lib/js2py/prototypes/jsarray.py | 27 | 14886 | import six
if six.PY3:
xrange = range
import functools
def to_arr(this):
"""Returns Python array from Js array"""
return [this.get(str(e)) for e in xrange(len(this))]
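# Illustrative note (added): to_arr() materialises a Js array object as a
# plain Python list of its (still wrapped) elements by reading keys '0'..'n-1'.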
ARR_STACK = set({})
class ArrayPrototype:
def toString():
        # this function is wrong but I will leave it here for debugging purposes.
func = this.get('join')
if not func.is_callable():
@this.Js
def func():
return '[object %s]'%this.Class
return func.call(this, ())
def toLocaleString():
array = this.to_object()
arr_len = array.get('length').to_uint32()
# separator is simply a comma ','
if not arr_len:
return ''
res = []
for i in xrange(arr_len):
element = array[str(i)]
if element.is_undefined() or element.is_null():
res.append('')
else:
                cand = element.to_object()
                str_func = cand.get('toLocaleString')
                if not str_func.is_callable():
                    raise this.MakeError('TypeError', 'toLocaleString method of item at index %d is not callable' % i)
                res.append(cand.callprop('toLocaleString').value)
return ','.join(res)
def concat():
array = this.to_object()
A = this.Js([])
items = [array]
items.extend(to_arr(arguments))
n = 0
for E in items:
if E.Class=='Array':
k = 0
e_len = len(E)
while k<e_len:
if E.has_property(str(k)):
A.put(str(n), E.get(str(k)))
n+=1
k+=1
else:
A.put(str(n), E)
n+=1
return A
def join(separator):
ARR_STACK.add(this)
array = this.to_object()
arr_len = array.get('length').to_uint32()
separator = ',' if separator.is_undefined() else separator.to_string().value
elems = []
for e in xrange(arr_len):
elem = array.get(str(e))
if elem in ARR_STACK:
s = ''
else:
s = elem.to_string().value
elems.append(s if not (elem.is_undefined() or elem.is_null()) else '')
res = separator.join(elems)
ARR_STACK.remove(this)
return res
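    # Illustrative note (added): ARR_STACK guards join() against infinite
    # recursion when an array contains itself; a self-referential element is
    # rendered as the empty string, matching JS, where
    # a = [1]; a.push(a); a.join(',') evaluates to '1,'.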
def pop(): #todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not arr_len:
array.put('length', this.Js(arr_len))
return None
ind = str(arr_len-1)
element = array.get(ind)
array.delete(ind)
array.put('length', this.Js(arr_len-1))
return element
def push(item): # todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
to_put = arguments.to_list()
i = arr_len
for i, e in enumerate(to_put, arr_len):
array.put(str(i), e)
if to_put:
i+=1
array.put('length', this.Js(i))
return i
def reverse():
array = this.to_object() # my own algorithm
vals = to_arr(array)
has_props = [array.has_property(str(e)) for e in xrange(len(array))]
vals.reverse()
has_props.reverse()
for i, val in enumerate(vals):
if has_props[i]:
array.put(str(i), val)
else:
array.delete(str(i))
return array
def shift(): #todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not arr_len:
array.put('length', this.Js(0))
return None
first = array.get('0')
for k in xrange(1, arr_len):
from_s, to_s = str(k), str(k-1)
if array.has_property(from_s):
array.put(to_s, array.get(from_s))
else:
                array.delete(to_s)
        array.delete(str(arr_len-1))
        array.put('length', this.Js(arr_len-1))
return first
def slice(start, end): # todo check
array = this.to_object()
arr_len = array.get('length').to_uint32()
relative_start = start.to_int()
k = max((arr_len + relative_start), 0) if relative_start<0 else min(relative_start, arr_len)
relative_end = arr_len if end.is_undefined() else end.to_int()
final = max((arr_len + relative_end), 0) if relative_end<0 else min(relative_end, arr_len)
res = []
n = 0
while k<final:
pk = str(k)
if array.has_property(pk):
res.append(array.get(pk))
k += 1
n += 1
return res
def sort(cmpfn):
        if this.Class not in {'Array', 'Arguments'}:
return this.to_object() # do nothing
arr = []
for i in xrange(len(this)):
arr.append(this.get(six.text_type(i)))
if not arr:
return this
if not cmpfn.is_callable():
cmpfn = None
cmp = lambda a,b: sort_compare(a, b, cmpfn)
if six.PY3:
key = functools.cmp_to_key(cmp)
arr.sort(key=key)
else:
arr.sort(cmp=cmp)
for i in xrange(len(arr)):
this.put(six.text_type(i), arr[i])
return this
def splice(start, deleteCount):
# 1-8
array = this.to_object()
arr_len = array.get('length').to_uint32()
relative_start = start.to_int()
actual_start = max((arr_len + relative_start),0) if relative_start<0 else min(relative_start, arr_len)
actual_delete_count = min(max(deleteCount.to_int(),0 ), arr_len - actual_start)
k = 0
A = this.Js([])
# 9
while k<actual_delete_count:
if array.has_property(str(actual_start+k)):
A.put(str(k), array.get(str(actual_start+k)))
k += 1
# 10-11
items = to_arr(arguments)[2:]
items_len = len(items)
# 12
if items_len<actual_delete_count:
k = actual_start
while k < (arr_len-actual_delete_count):
fr = str(k+actual_delete_count)
to = str(k+items_len)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k += 1
k = arr_len
while k > (arr_len - actual_delete_count + items_len):
array.delete(str(k-1))
k -= 1
# 13
elif items_len>actual_delete_count:
k = arr_len - actual_delete_count
while k>actual_start:
fr = str(k + actual_delete_count - 1)
to = str(k + items_len - 1)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k -= 1
# 14-17
k = actual_start
while items:
E = items.pop(0)
array.put(str(k), E)
k += 1
array.put('length', this.Js(arr_len - actual_delete_count + items_len))
return A
def unshift():
array = this.to_object()
arr_len = array.get('length').to_uint32()
argCount = len(arguments)
k = arr_len
while k > 0:
fr = str(k - 1)
to = str(k + argCount - 1)
if array.has_property(fr):
array.put(to, array.get(fr))
else:
array.delete(to)
k -= 1
j = 0
items = to_arr(arguments)
while items:
E = items.pop(0)
array.put(str(j), E)
j += 1
array.put('length', this.Js(arr_len + argCount))
return arr_len + argCount
def indexOf(searchElement):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if arr_len == 0:
return -1
if len(arguments)>1:
n = arguments[1].to_int()
else:
n = 0
if n >= arr_len:
return -1
if n >= 0:
k = n
else:
k = arr_len - abs(n)
if k < 0:
k = 0
while k < arr_len:
if array.has_property(str(k)):
elementK = array.get(str(k))
if searchElement.strict_equality_comparison(elementK):
return k
k += 1
return -1
def lastIndexOf(searchElement):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if arr_len == 0:
return -1
if len(arguments)>1:
n = arguments[1].to_int()
else:
n = arr_len - 1
if n >= 0:
k = min(n, arr_len-1)
else:
k = arr_len - abs(n)
while k >= 0:
if array.has_property(str(k)):
elementK = array.get(str(k))
if searchElement.strict_equality_comparison(elementK):
return k
k -= 1
return -1
def every(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if not callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
return False
k += 1
return True
def some(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
return True
k += 1
return False
def forEach(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
callbackfn.call(T, (kValue, this.Js(k), array))
k+=1
def map(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
A = this.Js([])
k = 0
while k<arr_len:
Pk = str(k)
if array.has_property(Pk):
kValue = array.get(Pk)
mappedValue = callbackfn.call(T, (kValue, this.Js(k), array))
A.define_own_property(Pk, {'value': mappedValue, 'writable': True,
'enumerable': True, 'configurable': True})
k += 1
return A
def filter(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
T = arguments[1]
res = []
k = 0
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value:
res.append(kValue)
k += 1
return res # converted to js array automatically
def reduce(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
if not arr_len and len(arguments)<2:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
k = 0
if len(arguments)>1: # initial value present
accumulator = arguments[1]
else:
kPresent = False
while not kPresent and k<arr_len:
kPresent = array.has_property(str(k))
if kPresent:
accumulator = array.get(str(k))
k += 1
if not kPresent:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
while k<arr_len:
if array.has_property(str(k)):
kValue = array.get(str(k))
accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
k += 1
return accumulator
def reduceRight(callbackfn):
array = this.to_object()
arr_len = array.get('length').to_uint32()
if not callbackfn.is_callable():
raise this.MakeError('TypeError', 'callbackfn must be a function')
if not arr_len and len(arguments)<2:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
k = arr_len - 1
if len(arguments)>1: # initial value present
accumulator = arguments[1]
else:
kPresent = False
while not kPresent and k>=0:
kPresent = array.has_property(str(k))
if kPresent:
accumulator = array.get(str(k))
k -= 1
if not kPresent:
raise this.MakeError('TypeError', 'Reduce of empty array with no initial value')
while k>=0:
if array.has_property(str(k)):
kValue = array.get(str(k))
accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array))
k -= 1
return accumulator
def sort_compare(a, b, comp):
if a is None:
if b is None:
return 0
return 1
if b is None:
if a is None:
return 0
return -1
if a.is_undefined():
if b.is_undefined():
return 0
return 1
if b.is_undefined():
if a.is_undefined():
return 0
return -1
if comp is not None:
res = comp.call(a.undefined, (a, b))
return res.to_int()
x, y = a.to_string(), b.to_string()
if x<y:
return -1
elif x>y:
return 1
return 0
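# Illustrative note (added): sort_compare() implements the ordering used by
# Array.prototype.sort - missing elements (None here) sort after undefined,
# undefined sorts after defined values, and without a user comparator values
# are compared as strings, so the JS array [10, 9, 1] sorts to [1, 10, 9]
# because '10' < '9' lexicographically.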
| gpl-3.0 | 1,386,306,743,819,528,400 | 31.502183 | 116 | 0.492275 | false |
JackWoot/E2E-Messenger | Server/passlib/apache.py | 21 | 39058 | """passlib.apache - apache password support"""
# XXX: relocate this to passlib.ext.apache?
#=============================================================================
# imports
#=============================================================================
from __future__ import with_statement
# core
from hashlib import md5
import logging; log = logging.getLogger(__name__)
import os
import sys
from warnings import warn
# site
# pkg
from passlib.context import CryptContext
from passlib.exc import ExpectedStringError
from passlib.hash import htdigest
from passlib.utils import consteq, render_bytes, to_bytes, deprecated_method, is_ascii_codec
from passlib.utils.compat import b, bytes, join_bytes, str_to_bascii, u, \
unicode, BytesIO, iteritems, imap, PY3
# local
__all__ = [
'HtpasswdFile',
'HtdigestFile',
]
#=============================================================================
# constants & support
#=============================================================================
_UNSET = object()
_BCOLON = b(":")
# byte values that aren't allowed in fields.
_INVALID_FIELD_CHARS = b(":\n\r\t\x00")
#=============================================================================
# backport of OrderedDict for PY2.5
#=============================================================================
try:
from collections import OrderedDict
except ImportError:
# Python 2.5
class OrderedDict(dict):
"""hacked OrderedDict replacement.
NOTE: this doesn't provide a full OrderedDict implementation,
just the minimum needed by the Htpasswd internals.
"""
def __init__(self):
self._keys = []
def __iter__(self):
return iter(self._keys)
def __setitem__(self, key, value):
if key not in self:
self._keys.append(key)
super(OrderedDict, self).__setitem__(key, value)
def __delitem__(self, key):
super(OrderedDict, self).__delitem__(key)
self._keys.remove(key)
def iteritems(self):
return ((key, self[key]) for key in self)
# these aren't used or implemented, so disabling them for safety.
update = pop = popitem = clear = keys = iterkeys = None
#=============================================================================
# common helpers
#=============================================================================
class _CommonFile(object):
"""common framework for HtpasswdFile & HtdigestFile"""
#===================================================================
# instance attrs
#===================================================================
# charset encoding used by file (defaults to utf-8)
encoding = None
# whether users() and other public methods should return unicode or bytes?
# (defaults to False under PY2, True under PY3)
return_unicode = None
# if bound to local file, these will be set.
_path = None # local file path
_mtime = None # mtime when last loaded, or 0
# if true, automatically save to local file after changes are made.
autosave = False
# ordered dict mapping key -> value for all records in database.
# (e.g. user => hash for Htpasswd)
_records = None
#===================================================================
# alt constuctors
#===================================================================
@classmethod
def from_string(cls, data, **kwds):
"""create new object from raw string.
:type data: unicode or bytes
:arg data:
database to load, as single string.
:param \*\*kwds:
all other keywords are the same as in the class constructor
"""
if 'path' in kwds:
raise TypeError("'path' not accepted by from_string()")
self = cls(**kwds)
self.load_string(data)
return self
@classmethod
def from_path(cls, path, **kwds):
"""create new object from file, without binding object to file.
:type path: str
:arg path:
local filepath to load from
:param \*\*kwds:
all other keywords are the same as in the class constructor
"""
self = cls(**kwds)
self.load(path)
return self
#===================================================================
# init
#===================================================================
def __init__(self, path=None, new=False, autoload=True, autosave=False,
encoding="utf-8", return_unicode=PY3,
):
# set encoding
if not encoding:
warn("``encoding=None`` is deprecated as of Passlib 1.6, "
"and will cause a ValueError in Passlib 1.8, "
"use ``return_unicode=False`` instead.",
DeprecationWarning, stacklevel=2)
encoding = "utf-8"
return_unicode = False
elif not is_ascii_codec(encoding):
# htpasswd/htdigest files assumes 1-byte chars, and use ":" separator,
# so only ascii-compatible encodings are allowed.
raise ValueError("encoding must be 7-bit ascii compatible")
self.encoding = encoding
# set other attrs
self.return_unicode = return_unicode
self.autosave = autosave
self._path = path
self._mtime = 0
# init db
if not autoload:
warn("``autoload=False`` is deprecated as of Passlib 1.6, "
"and will be removed in Passlib 1.8, use ``new=True`` instead",
DeprecationWarning, stacklevel=2)
new = True
if path and not new:
self.load()
else:
self._records = OrderedDict()
def __repr__(self):
tail = ''
if self.autosave:
tail += ' autosave=True'
if self._path:
tail += ' path=%r' % self._path
if self.encoding != "utf-8":
tail += ' encoding=%r' % self.encoding
return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail)
# NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set.
def _get_path(self):
return self._path
def _set_path(self, value):
if value != self._path:
self._mtime = 0
self._path = value
path = property(_get_path, _set_path)
@property
def mtime(self):
"modify time when last loaded (if bound to a local file)"
return self._mtime
#===================================================================
# loading
#===================================================================
def load_if_changed(self):
"""Reload from ``self.path`` only if file has changed since last load"""
if not self._path:
raise RuntimeError("%r is not bound to a local file" % self)
if self._mtime and self._mtime == os.path.getmtime(self._path):
return False
self.load()
return True
def load(self, path=None, force=True):
"""Load state from local file.
If no path is specified, attempts to load from ``self.path``.
:type path: str
:arg path: local file to load from
:type force: bool
:param force:
if ``force=False``, only load from ``self.path`` if file
has changed since last load.
.. deprecated:: 1.6
This keyword will be removed in Passlib 1.8;
Applications should use :meth:`load_if_changed` instead.
"""
if path is not None:
with open(path, "rb") as fh:
self._mtime = 0
self._load_lines(fh)
elif not force:
warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6,"
"and will be removed in Passlib 1.8; "
"use %(name)s.load_if_changed() instead." %
dict(name=self.__class__.__name__),
DeprecationWarning, stacklevel=2)
return self.load_if_changed()
elif self._path:
with open(self._path, "rb") as fh:
self._mtime = os.path.getmtime(self._path)
self._load_lines(fh)
else:
raise RuntimeError("%s().path is not set, an explicit path is required" %
self.__class__.__name__)
return True
def load_string(self, data):
"Load state from unicode or bytes string, replacing current state"
data = to_bytes(data, self.encoding, "data")
self._mtime = 0
self._load_lines(BytesIO(data))
def _load_lines(self, lines):
"load from sequence of lists"
# XXX: found reference that "#" comment lines may be supported by
# htpasswd, should verify this, and figure out how to handle them.
# if true, this would also affect what can be stored in user field.
# XXX: if multiple entries for a key, should we use the first one
# or the last one? going w/ first entry for now.
# XXX: how should this behave if parsing fails? currently
# it will contain everything that was loaded up to error.
# could clear / restore old state instead.
parse = self._parse_record
records = self._records = OrderedDict()
for idx, line in enumerate(lines):
key, value = parse(line, idx+1)
if key not in records:
records[key] = value
    def _parse_record(self, record, lineno): # pragma: no cover - abstract method
"parse line of file into (key, value) pair"
raise NotImplementedError("should be implemented in subclass")
#===================================================================
# saving
#===================================================================
def _autosave(self):
"subclass helper to call save() after any changes"
if self.autosave and self._path:
self.save()
def save(self, path=None):
"""Save current state to file.
If no path is specified, attempts to save to ``self.path``.
"""
if path is not None:
with open(path, "wb") as fh:
fh.writelines(self._iter_lines())
elif self._path:
self.save(self._path)
self._mtime = os.path.getmtime(self._path)
else:
raise RuntimeError("%s().path is not set, cannot autosave" %
self.__class__.__name__)
def to_string(self):
"Export current state as a string of bytes"
return join_bytes(self._iter_lines())
def _iter_lines(self):
"iterator yielding lines of database"
return (self._render_record(key,value) for key,value in iteritems(self._records))
    def _render_record(self, key, value): # pragma: no cover - abstract method
"given key/value pair, encode as line of file"
raise NotImplementedError("should be implemented in subclass")
#===================================================================
# field encoding
#===================================================================
def _encode_user(self, user):
"user-specific wrapper for _encode_field()"
return self._encode_field(user, "user")
def _encode_realm(self, realm): # pragma: no cover - abstract method
"realm-specific wrapper for _encode_field()"
return self._encode_field(realm, "realm")
def _encode_field(self, value, param="field"):
"""convert field to internal representation.
internal representation is always bytes. byte strings are left as-is,
unicode strings encoding using file's default encoding (or ``utf-8``
if no encoding has been specified).
:raises UnicodeEncodeError:
if unicode value cannot be encoded using default encoding.
:raises ValueError:
if resulting byte string contains a forbidden character,
or is too long (>255 bytes).
:returns:
encoded identifer as bytes
"""
if isinstance(value, unicode):
value = value.encode(self.encoding)
elif not isinstance(value, bytes):
raise ExpectedStringError(value, param)
if len(value) > 255:
raise ValueError("%s must be at most 255 characters: %r" %
(param, value))
if any(c in _INVALID_FIELD_CHARS for c in value):
raise ValueError("%s contains invalid characters: %r" %
(param, value,))
return value
def _decode_field(self, value):
"""decode field from internal representation to format
returns by users() method, etc.
:raises UnicodeDecodeError:
if unicode value cannot be decoded using default encoding.
(usually indicates wrong encoding set for file).
:returns:
field as unicode or bytes, as appropriate.
"""
assert isinstance(value, bytes), "expected value to be bytes"
if self.return_unicode:
return value.decode(self.encoding)
else:
return value
# FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE,
# and that longer ones are truncated. this may be side-effect of those
# platforms supporting the 'plaintext' scheme. these classes don't currently
# check for this.
#===================================================================
# eoc
#===================================================================
#=============================================================================
# htpasswd editing
#=============================================================================
# FIXME: apr_md5_crypt technically the default only for windows, netware and tpf.
# TODO: find out if htpasswd's "crypt" mode is a crypt() *call* or just des_crypt implementation.
# if the former, we can support anything supported by passlib.hosts.host_context,
# allowing more secure hashes than apr_md5_crypt to be used.
# could perhaps add this behavior as an option to the constructor.
# c.f. http://httpd.apache.org/docs/2.2/programs/htpasswd.html
htpasswd_context = CryptContext([
"apr_md5_crypt", # man page notes supported everywhere, default on Windows, Netware, TPF
"des_crypt", # man page notes server does NOT support this on Windows, Netware, TPF
"ldap_sha1", # man page notes only for transitioning <-> ldap
"plaintext" # man page notes server ONLY supports this on Windows, Netware, TPF
])
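# Illustrative note (added): with this context, newly hashed passwords default
# to apr_md5_crypt, e.g. htpasswd_context.encrypt("secret") yields a hash of
# the form '$apr1$<salt>$<checksum>' (encrypt() is the Passlib 1.6-era API).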
class HtpasswdFile(_CommonFile):
"""class for reading & writing Htpasswd files.
The class constructor accepts the following arguments:
:type path: filepath
:param path:
Specifies path to htpasswd file, use to implicitly load from and save to.
This class has two modes of operation:
1. It can be "bound" to a local file by passing a ``path`` to the class
constructor. In this case it will load the contents of the file when
created, and the :meth:`load` and :meth:`save` methods will automatically
load from and save to that file if they are called without arguments.
2. Alternately, it can exist as an independant object, in which case
:meth:`load` and :meth:`save` will require an explicit path to be
provided whenever they are called. As well, ``autosave`` behavior
will not be available.
This feature is new in Passlib 1.6, and is the default if no
``path`` value is provided to the constructor.
This is also exposed as a readonly instance attribute.
:type new: bool
:param new:
Normally, if *path* is specified, :class:`HtpasswdFile` will
immediately load the contents of the file. However, when creating
a new htpasswd file, applications can set ``new=True`` so that
the existing file (if any) will not be loaded.
.. versionadded:: 1.6
This feature was previously enabled by setting ``autoload=False``.
That alias has been deprecated, and will be removed in Passlib 1.8
:type autosave: bool
:param autosave:
Normally, any changes made to an :class:`HtpasswdFile` instance
will not be saved until :meth:`save` is explicitly called. However,
if ``autosave=True`` is specified, any changes made will be
saved to disk immediately (assuming *path* has been set).
This is also exposed as a writeable instance attribute.
:type encoding: str
:param encoding:
Optionally specify character encoding used to read/write file
and hash passwords. Defaults to ``utf-8``, though ``latin-1``
is the only other commonly encountered encoding.
This is also exposed as a readonly instance attribute.
:type default_scheme: str
:param default_scheme:
Optionally specify default scheme to use when encoding new passwords.
Must be one of ``"apr_md5_crypt"``, ``"des_crypt"``, ``"ldap_sha1"``,
``"plaintext"``. It defaults to ``"apr_md5_crypt"``.
.. versionadded:: 1.6
This keyword was previously named ``default``. That alias
has been deprecated, and will be removed in Passlib 1.8.
:type context: :class:`~passlib.context.CryptContext`
:param context:
:class:`!CryptContext` instance used to encrypt
and verify the hashes found in the htpasswd file.
The default value is a pre-built context which supports all
of the hashes officially allowed in an htpasswd file.
This is also exposed as a readonly instance attribute.
.. warning::
This option may be used to add support for non-standard hash
formats to an htpasswd file. However, the resulting file
        will probably not be usable by another application,
and particularly not by Apache.
:param autoload:
Set to ``False`` to prevent the constructor from automatically
loaded the file from disk.
.. deprecated:: 1.6
This has been replaced by the *new* keyword.
Instead of setting ``autoload=False``, you should use
``new=True``. Support for this keyword will be removed
in Passlib 1.8.
:param default:
Change the default algorithm used to encrypt new passwords.
.. deprecated:: 1.6
This has been renamed to *default_scheme* for clarity.
Support for this alias will be removed in Passlib 1.8.
Loading & Saving
================
.. automethod:: load
.. automethod:: load_if_changed
.. automethod:: load_string
.. automethod:: save
.. automethod:: to_string
Inspection
================
.. automethod:: users
.. automethod:: check_password
.. automethod:: get_hash
Modification
================
.. automethod:: set_password
.. automethod:: delete
Alternate Constructors
======================
.. automethod:: from_string
Attributes
==========
.. attribute:: path
Path to local file that will be used as the default
for all :meth:`load` and :meth:`save` operations.
May be written to, initialized by the *path* constructor keyword.
.. attribute:: autosave
Writeable flag indicating whether changes will be automatically
written to *path*.
Errors
======
:raises ValueError:
All of the methods in this class will raise a :exc:`ValueError` if
any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``),
or is longer than 255 characters.
"""
#===================================================================
# instance attrs
#===================================================================
# NOTE: _records map stores <user> for the key, and <hash> for the value,
# both in bytes which use self.encoding
#===================================================================
# init & serialization
#===================================================================
def __init__(self, path=None, default_scheme=None, context=htpasswd_context,
**kwds):
if 'default' in kwds:
warn("``default`` is deprecated as of Passlib 1.6, "
"and will be removed in Passlib 1.8, it has been renamed "
"to ``default_scheem``.",
DeprecationWarning, stacklevel=2)
default_scheme = kwds.pop("default")
if default_scheme:
context = context.copy(default=default_scheme)
self.context = context
super(HtpasswdFile, self).__init__(path, **kwds)
def _parse_record(self, record, lineno):
# NOTE: should return (user, hash) tuple
result = record.rstrip().split(_BCOLON)
if len(result) != 2:
raise ValueError("malformed htpasswd file (error reading line %d)"
% lineno)
return result
def _render_record(self, user, hash):
return render_bytes("%s:%s\n", user, hash)
#===================================================================
# public methods
#===================================================================
def users(self):
"Return list of all users in database"
return [self._decode_field(user) for user in self._records]
##def has_user(self, user):
## "check whether entry is present for user"
## return self._encode_user(user) in self._records
##def rename(self, old, new):
## """rename user account"""
## old = self._encode_user(old)
## new = self._encode_user(new)
## hash = self._records.pop(old)
## self._records[new] = hash
## self._autosave()
def set_password(self, user, password):
"""Set password for user; adds user if needed.
:returns:
* ``True`` if existing user was updated.
* ``False`` if user account was added.
.. versionchanged:: 1.6
This method was previously called ``update``, it was renamed
to prevent ambiguity with the dictionary method.
The old alias is deprecated, and will be removed in Passlib 1.8.
"""
user = self._encode_user(user)
hash = self.context.encrypt(password)
if PY3:
hash = hash.encode(self.encoding)
existing = (user in self._records)
self._records[user] = hash
self._autosave()
return existing
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="set_password")
def update(self, user, password):
"set password for user"
return self.set_password(user, password)
def get_hash(self, user):
"""Return hash stored for user, or ``None`` if user not found.
.. versionchanged:: 1.6
This method was previously named ``find``, it was renamed
for clarity. The old name is deprecated, and will be removed
in Passlib 1.8.
"""
try:
return self._records[self._encode_user(user)]
except KeyError:
return None
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="get_hash")
def find(self, user):
"return hash for user"
return self.get_hash(user)
# XXX: rename to something more explicit, like delete_user()?
def delete(self, user):
"""Delete user's entry.
:returns:
* ``True`` if user deleted.
* ``False`` if user not found.
"""
try:
del self._records[self._encode_user(user)]
except KeyError:
return False
self._autosave()
return True
def check_password(self, user, password):
"""Verify password for specified user.
:returns:
* ``None`` if user not found.
* ``False`` if user found, but password does not match.
* ``True`` if user found and password matches.
.. versionchanged:: 1.6
This method was previously called ``verify``, it was renamed
to prevent ambiguity with the :class:`!CryptContext` method.
The old alias is deprecated, and will be removed in Passlib 1.8.
"""
user = self._encode_user(user)
hash = self._records.get(user)
if hash is None:
return None
if isinstance(password, unicode):
# NOTE: encoding password to match file, making the assumption
# that server will use same encoding to hash the password.
password = password.encode(self.encoding)
ok, new_hash = self.context.verify_and_update(password, hash)
if ok and new_hash is not None:
# rehash user's password if old hash was deprecated
self._records[user] = new_hash
self._autosave()
return ok
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="check_password")
def verify(self, user, password):
"verify password for user"
return self.check_password(user, password)
#===================================================================
# eoc
#===================================================================
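# --- illustrative usage (added; not part of passlib) ---
# A minimal sketch of the HtpasswdFile API above; the file name and
# credentials are invented for demonstration only:
#
#     ht = HtpasswdFile("test.htpasswd", new=True, autosave=True)
#     ht.set_password("user1", "secret")    # returns False (account was added)
#     ht.check_password("user1", "secret")  # returns True
#     ht.check_password("user1", "wrong")   # returns False
#     ht.delete("user1")                    # returns True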
#=============================================================================
# htdigest editing
#=============================================================================
class HtdigestFile(_CommonFile):
"""class for reading & writing Htdigest files.
The class constructor accepts the following arguments:
:type path: filepath
:param path:
Specifies path to htdigest file, use to implicitly load from and save to.
This class has two modes of operation:
1. It can be "bound" to a local file by passing a ``path`` to the class
constructor. In this case it will load the contents of the file when
created, and the :meth:`load` and :meth:`save` methods will automatically
load from and save to that file if they are called without arguments.
2. Alternately, it can exist as an independant object, in which case
:meth:`load` and :meth:`save` will require an explicit path to be
provided whenever they are called. As well, ``autosave`` behavior
will not be available.
This feature is new in Passlib 1.6, and is the default if no
``path`` value is provided to the constructor.
This is also exposed as a readonly instance attribute.
:type default_realm: str
:param default_realm:
If ``default_realm`` is set, all the :class:`HtdigestFile`
methods that require a realm will use this value if one is not
provided explicitly. If unset, they will raise an error stating
that an explicit realm is required.
This is also exposed as a writeable instance attribute.
.. versionadded:: 1.6
:type new: bool
:param new:
Normally, if *path* is specified, :class:`HtdigestFile` will
immediately load the contents of the file. However, when creating
a new htpasswd file, applications can set ``new=True`` so that
the existing file (if any) will not be loaded.
.. versionadded:: 1.6
This feature was previously enabled by setting ``autoload=False``.
That alias has been deprecated, and will be removed in Passlib 1.8
:type autosave: bool
:param autosave:
Normally, any changes made to an :class:`HtdigestFile` instance
will not be saved until :meth:`save` is explicitly called. However,
if ``autosave=True`` is specified, any changes made will be
saved to disk immediately (assuming *path* has been set).
This is also exposed as a writeable instance attribute.
:type encoding: str
:param encoding:
Optionally specify character encoding used to read/write file
and hash passwords. Defaults to ``utf-8``, though ``latin-1``
is the only other commonly encountered encoding.
This is also exposed as a readonly instance attribute.
:param autoload:
Set to ``False`` to prevent the constructor from automatically
loaded the file from disk.
.. deprecated:: 1.6
This has been replaced by the *new* keyword.
Instead of setting ``autoload=False``, you should use
``new=True``. Support for this keyword will be removed
in Passlib 1.8.
Loading & Saving
================
.. automethod:: load
.. automethod:: load_if_changed
.. automethod:: load_string
.. automethod:: save
.. automethod:: to_string
Inspection
==========
.. automethod:: realms
.. automethod:: users
.. automethod:: check_password(user[, realm], password)
.. automethod:: get_hash
Modification
============
.. automethod:: set_password(user[, realm], password)
.. automethod:: delete
.. automethod:: delete_realm
Alternate Constructors
======================
.. automethod:: from_string
Attributes
==========
.. attribute:: default_realm
The default realm that will be used if one is not provided
to methods that require it. By default this is ``None``,
in which case an explicit realm must be provided for every
method call. Can be written to.
.. attribute:: path
Path to local file that will be used as the default
for all :meth:`load` and :meth:`save` operations.
May be written to, initialized by the *path* constructor keyword.
.. attribute:: autosave
Writeable flag indicating whether changes will be automatically
written to *path*.
Errors
======
:raises ValueError:
All of the methods in this class will raise a :exc:`ValueError` if
any user name or realm contains a forbidden character (one of ``:\\r\\n\\t\\x00``),
or is longer than 255 characters.
"""
#===================================================================
# instance attrs
#===================================================================
# NOTE: _records map stores (<user>,<realm>) for the key,
# and <hash> as the value, all as <self.encoding> bytes.
# NOTE: unlike htpasswd, this class doesn't use a CryptContext,
# as only one hash format is supported: htdigest.
# optionally specify default realm that will be used if none
# is provided to a method call. otherwise realm is always required.
default_realm = None
#===================================================================
# init & serialization
#===================================================================
def __init__(self, path=None, default_realm=None, **kwds):
self.default_realm = default_realm
super(HtdigestFile, self).__init__(path, **kwds)
def _parse_record(self, record, lineno):
result = record.rstrip().split(_BCOLON)
if len(result) != 3:
raise ValueError("malformed htdigest file (error reading line %d)"
% lineno)
user, realm, hash = result
return (user, realm), hash
def _render_record(self, key, hash):
user, realm = key
return render_bytes("%s:%s:%s\n", user, realm, hash)
def _encode_realm(self, realm):
# override default _encode_realm to fill in default realm field
if realm is None:
realm = self.default_realm
if realm is None:
raise TypeError("you must specify a realm explicitly, "
"or set the default_realm attribute")
return self._encode_field(realm, "realm")
#===================================================================
# public methods
#===================================================================
def realms(self):
"""Return list of all realms in database"""
realms = set(key[1] for key in self._records)
return [self._decode_field(realm) for realm in realms]
def users(self, realm=None):
"""Return list of all users in specified realm.
* uses ``self.default_realm`` if no realm explicitly provided.
* returns empty list if realm not found.
"""
realm = self._encode_realm(realm)
return [self._decode_field(key[0]) for key in self._records
if key[1] == realm]
##def has_user(self, user, realm=None):
## "check if user+realm combination exists"
## user = self._encode_user(user)
## realm = self._encode_realm(realm)
## return (user,realm) in self._records
##def rename_realm(self, old, new):
## """rename all accounts in realm"""
## old = self._encode_realm(old)
## new = self._encode_realm(new)
## keys = [key for key in self._records if key[1] == old]
## for key in keys:
## hash = self._records.pop(key)
## self._records[key[0],new] = hash
## self._autosave()
## return len(keys)
##def rename(self, old, new, realm=None):
## """rename user account"""
## old = self._encode_user(old)
## new = self._encode_user(new)
## realm = self._encode_realm(realm)
## hash = self._records.pop((old,realm))
## self._records[new,realm] = hash
## self._autosave()
def set_password(self, user, realm=None, password=_UNSET):
"""Set password for user; adds user & realm if needed.
If ``self.default_realm`` has been set, this may be called
with the syntax ``set_password(user, password)``,
otherwise it must be called with all three arguments:
``set_password(user, realm, password)``.
:returns:
* ``True`` if existing user was updated
* ``False`` if user account added.
"""
if password is _UNSET:
# called w/ two args - (user, password), use default realm
realm, password = None, realm
user = self._encode_user(user)
realm = self._encode_realm(realm)
key = (user, realm)
existing = (key in self._records)
hash = htdigest.encrypt(password, user, realm, encoding=self.encoding)
if PY3:
hash = hash.encode(self.encoding)
self._records[key] = hash
self._autosave()
return existing
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="set_password")
def update(self, user, realm, password):
"set password for user"
return self.set_password(user, realm, password)
def get_hash(self, user, realm=None):
"""Return :class:`~passlib.hash.htdigest` hash stored for user.
* uses ``self.default_realm`` if no realm explicitly provided.
* returns ``None`` if user or realm not found.
.. versionchanged:: 1.6
This method was previously named ``find``, it was renamed
for clarity. The old name is deprecated, and will be removed
in Passlib 1.8.
"""
key = (self._encode_user(user), self._encode_realm(realm))
hash = self._records.get(key)
if hash is None:
return None
if PY3:
hash = hash.decode(self.encoding)
return hash
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="get_hash")
def find(self, user, realm):
"return hash for user"
return self.get_hash(user, realm)
# XXX: rename to something more explicit, like delete_user()?
def delete(self, user, realm=None):
"""Delete user's entry for specified realm.
if realm is not specified, uses ``self.default_realm``.
:returns:
* ``True`` if user deleted,
* ``False`` if user not found in realm.
"""
key = (self._encode_user(user), self._encode_realm(realm))
try:
del self._records[key]
except KeyError:
return False
self._autosave()
return True
def delete_realm(self, realm):
"""Delete all users for specified realm.
if realm is not specified, uses ``self.default_realm``.
:returns: number of users deleted (0 if realm not found)
"""
realm = self._encode_realm(realm)
records = self._records
keys = [key for key in records if key[1] == realm]
for key in keys:
del records[key]
self._autosave()
return len(keys)
def check_password(self, user, realm=None, password=_UNSET):
"""Verify password for specified user + realm.
If ``self.default_realm`` has been set, this may be called
with the syntax ``check_password(user, password)``,
otherwise it must be called with all three arguments:
``check_password(user, realm, password)``.
:returns:
* ``None`` if user or realm not found.
* ``False`` if user found, but password does not match.
* ``True`` if user found and password matches.
.. versionchanged:: 1.6
This method was previously called ``verify``, it was renamed
to prevent ambiguity with the :class:`!CryptContext` method.
The old alias is deprecated, and will be removed in Passlib 1.8.
"""
if password is _UNSET:
# called w/ two args - (user, password), use default realm
realm, password = None, realm
user = self._encode_user(user)
realm = self._encode_realm(realm)
hash = self._records.get((user,realm))
if hash is None:
return None
return htdigest.verify(password, hash, user, realm,
encoding=self.encoding)
@deprecated_method(deprecated="1.6", removed="1.8",
replacement="check_password")
def verify(self, user, realm, password):
"verify password for user"
return self.check_password(user, realm, password)
#===================================================================
# eoc
#===================================================================
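# --- illustrative usage (added; not part of passlib) ---
# A minimal sketch of the HtdigestFile API above; the names are invented:
#
#     hd = HtdigestFile("test.htdigest", default_realm="dev", new=True)
#     hd.set_password("user1", "secret")    # realm falls back to "dev"
#     hd.check_password("user1", "secret")  # returns True
#     hd.users("dev")                       # returns ["user1"]
#     hd.delete_realm("dev")                # returns 1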
#=============================================================================
# eof
#=============================================================================
| gpl-2.0 | -4,691,034,082,114,196,000 | 36.664417 | 97 | 0.547237 | false |
guorendong/iridium-browser-ubuntu | tools/json_schema_compiler/js_externs_generator.py | 12 | 11188 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Generator that produces an externs file for the Closure Compiler.
Note: This is a work in progress, and generated externs may require tweaking.
See https://developers.google.com/closure/compiler/docs/api-tutorial3#externs
"""
from code import Code
from model import *
from schema_util import *
import os
from datetime import datetime
import re
LICENSE = ("""// Copyright %s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
""" % datetime.now().year)
class JsExternsGenerator(object):
def Generate(self, namespace):
return _Generator(namespace).Generate()
class _Generator(object):
def __init__(self, namespace):
self._namespace = namespace
def Generate(self):
"""Generates a Code object with the schema for the entire namespace.
"""
c = Code()
(c.Append(LICENSE)
.Append()
.Append('/** @fileoverview Externs generated from namespace: %s */' %
self._namespace.name)
.Append())
c.Cblock(self._GenerateNamespaceObject())
for js_type in self._namespace.types.values():
c.Cblock(self._GenerateType(js_type))
for function in self._namespace.functions.values():
c.Cblock(self._GenerateFunction(function))
for event in self._namespace.events.values():
c.Cblock(self._GenerateEvent(event))
return c
def _GenerateType(self, js_type):
"""Given a Type object, returns the Code for this type's definition.
"""
c = Code()
if js_type.property_type is PropertyType.ENUM:
c.Concat(self._GenerateEnumJsDoc(js_type))
else:
c.Concat(self._GenerateTypeJsDoc(js_type))
return c
def _GenerateEnumJsDoc(self, js_type):
""" Given an Enum Type object, returns the Code for the enum's definition.
"""
c = Code()
(c.Sblock(line='/**', line_prefix=' * ')
.Append('@enum {string}')
.Append(self._GenerateSeeLink('type', js_type.simple_name))
.Eblock(' */'))
c.Append('chrome.%s.%s = {' % (self._namespace.name, js_type.name))
def get_property_name(e):
# Enum properties are normified to be in ALL_CAPS_STYLE.
# Assume enum '1ring-rulesThemAll'.
# Transform to '1ring-rules_Them_All'.
e = re.sub(r'([a-z])([A-Z])', r'\1_\2', e)
# Transform to '1ring_rules_Them_All'.
e = re.sub(r'\W', '_', e)
# Transform to '_1ring_rules_Them_All'.
e = re.sub(r'^(\d)', r'_\1', e)
# Transform to '_1RING_RULES_THEM_ALL'.
return e.upper()
c.Append('\n'.join(
[" %s: '%s'," % (get_property_name(v.name), v.name)
for v in js_type.enum_values]))
c.Append('};')
return c
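  # Example of generated output (added for illustration; the namespace and
  # enum names are invented):
  #   /**
  #    * @enum {string}
  #    * @see https://developer.chrome.com/extensions/foo#type-Bar
  #    */
  #   chrome.foo.Bar = {
  #     BAZ: 'baz',
  #   };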
def _IsTypeConstructor(self, js_type):
"""Returns true if the given type should be a @constructor. If this returns
false, the type is a typedef.
"""
return any(prop.type_.property_type is PropertyType.FUNCTION
for prop in js_type.properties.values())
def _GenerateTypeJsDoc(self, js_type):
"""Generates the documentation for a type as a Code.
Returns an empty code object if the object has no documentation.
"""
c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if js_type.description:
for line in js_type.description.splitlines():
c.Append(line)
is_constructor = self._IsTypeConstructor(js_type)
if is_constructor:
      c.Comment('@constructor', comment_prefix=' * ', wrap_indent=4)
else:
c.Concat(self._GenerateTypedef(js_type.properties))
c.Append(self._GenerateSeeLink('type', js_type.simple_name))
c.Eblock(' */')
var = 'var ' + js_type.simple_name
if is_constructor: var += ' = function() {}'
var += ';'
c.Append(var)
return c
def _GenerateTypedef(self, properties):
"""Given an OrderedDict of properties, returns a Code containing a @typedef.
"""
if not properties: return Code()
c = Code()
c.Append('@typedef {')
c.Concat(self._GenerateObjectDefinition(properties), new_line=False)
c.Append('}', new_line=False)
return c
def _GenerateObjectDefinition(self, properties):
"""Given an OrderedDict of properties, returns a Code containing the
description of an object.
"""
if not properties: return Code()
c = Code()
c.Sblock('{')
first = True
for field, prop in properties.items():
# Avoid trailing comma.
# TODO(devlin): This will be unneeded, if/when
# https://github.com/google/closure-compiler/issues/796 is fixed.
if not first:
c.Append(',', new_line=False)
first = False
js_type = self._TypeToJsType(prop.type_)
if prop.optional:
js_type = (Code().
Append('(').
Concat(js_type, new_line=False).
Append('|undefined)', new_line=False))
c.Append('%s: ' % field, strip_right=False)
c.Concat(js_type, new_line=False)
c.Eblock('}')
return c
def _GenerateFunctionJsDoc(self, function):
"""Generates the documentation for a function as a Code.
Returns an empty code object if the object has no documentation.
"""
c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if function.description:
c.Comment(function.description, comment_prefix='')
def append_field(c, tag, js_type, name, optional, description):
c.Append('@%s {' % tag)
c.Concat(js_type, new_line=False)
if optional:
c.Append('=', new_line=False)
c.Append('} %s' % name, new_line=False)
if description:
c.Comment(' %s' % description, comment_prefix='',
wrap_indent=4, new_line=False)
for param in function.params:
append_field(c, 'param', self._TypeToJsType(param.type_), param.name,
param.optional, param.description)
if function.callback:
append_field(c, 'param', self._FunctionToJsFunction(function.callback),
function.callback.name, function.callback.optional,
function.callback.description)
if function.returns:
append_field(c, 'return', self._TypeToJsType(function.returns),
'', False, function.returns.description)
if function.deprecated:
c.Append('@deprecated %s' % function.deprecated)
c.Append(self._GenerateSeeLink('method', function.name))
c.Eblock(' */')
return c
def _FunctionToJsFunction(self, function):
"""Converts a model.Function to a JS type (i.e., function([params])...)"""
c = Code()
c.Append('function(')
for i, param in enumerate(function.params):
c.Concat(self._TypeToJsType(param.type_), new_line=False)
if i is not len(function.params) - 1:
c.Append(', ', new_line=False, strip_right=False)
c.Append('):', new_line=False)
if function.returns:
c.Concat(self._TypeToJsType(function.returns), new_line=False)
else:
c.Append('void', new_line=False)
return c
def _TypeToJsType(self, js_type):
"""Converts a model.Type to a JS type (number, Array, etc.)"""
if js_type.property_type in (PropertyType.INTEGER, PropertyType.DOUBLE):
return Code().Append('number')
if js_type.property_type is PropertyType.OBJECT:
if js_type.properties:
return self._GenerateObjectDefinition(js_type.properties)
return Code().Append('Object')
if js_type.property_type is PropertyType.ARRAY:
return (Code().Append('!Array<').
Concat(self._TypeToJsType(js_type.item_type), new_line=False).
Append('>', new_line=False))
if js_type.property_type is PropertyType.REF:
ref_type = js_type.ref_type
# Enums are defined as chrome.fooAPI.MyEnum, but types are defined simply
# as MyType.
if self._namespace.types[ref_type].property_type is PropertyType.ENUM:
ref_type = '!chrome.%s.%s' % (self._namespace.name, ref_type)
return Code().Append(ref_type)
if js_type.property_type is PropertyType.CHOICES:
c = Code()
c.Append('(')
for i, choice in enumerate(js_type.choices):
c.Concat(self._TypeToJsType(choice), new_line=False)
if i is not len(js_type.choices) - 1:
c.Append('|', new_line=False)
c.Append(')', new_line=False)
return c
if js_type.property_type is PropertyType.FUNCTION:
return self._FunctionToJsFunction(js_type.function)
if js_type.property_type is PropertyType.ANY:
return Code().Append('*')
if js_type.property_type.is_fundamental:
return Code().Append(js_type.property_type.name)
return Code().Append('?') # TODO(tbreisacher): Make this more specific.
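  # Illustrative sketch, not part of the original generator: assuming a
  # hypothetical namespace 'fooApi', the mapping above yields, for example:
  #   INTEGER / DOUBLE            -> number
  #   ARRAY of string             -> !Array<string>
  #   REF to enum MyEnum          -> !chrome.fooApi.MyEnum
  #   CHOICES of [string, number] -> (string|number)
  #   ANY                         -> *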
def _GenerateFunction(self, function):
"""Generates the code representing a function, including its documentation.
For example:
/**
* @param {string} title The new title.
*/
chrome.window.setTitle = function(title) {};
"""
c = Code()
params = self._GenerateFunctionParams(function)
(c.Concat(self._GenerateFunctionJsDoc(function))
.Append('chrome.%s.%s = function(%s) {};' % (self._namespace.name,
function.name,
params))
)
return c
def _GenerateEvent(self, event):
"""Generates the code representing an event.
For example:
/** @type {!ChromeEvent} */
chrome.bookmarks.onChildrenReordered;
"""
c = Code()
c.Sblock(line='/**', line_prefix=' * ')
    if event.description:
c.Comment(event.description, comment_prefix='')
c.Append('@type {!ChromeEvent}')
c.Append(self._GenerateSeeLink('event', event.name))
c.Eblock(' */')
c.Append('chrome.%s.%s;' % (self._namespace.name, event.name))
return c
def _GenerateNamespaceObject(self):
"""Generates the code creating namespace object.
For example:
/**
* @const
*/
chrome.bookmarks = {};
"""
c = Code()
(c.Append("""/**
* @const
*/""")
.Append('chrome.%s = {};' % self._namespace.name))
return c
def _GenerateFunctionParams(self, function):
params = function.params[:]
if function.callback:
params.append(function.callback)
return ', '.join(param.name for param in params)
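  # Illustrative sketch, not part of the original generator: for a
  # hypothetical function with one parameter 'tabId' and a callback, the
  # helper above returns the string 'tabId, callback'.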
def _GenerateSeeLink(self, object_type, object_name):
"""Generates a @see link for a given API 'object' (type, method, or event).
"""
# NOTE(devlin): This is kind of a hack. Some APIs will be hosted on
# developer.chrome.com/apps/ instead of /extensions/, and some APIs have
# '.'s in them (like app.window), which should resolve to 'app_window'.
# Luckily, the doc server has excellent url resolution, and knows exactly
# what we mean. This saves us from needing any complicated logic here.
return ('@see https://developer.chrome.com/extensions/%s#%s-%s' %
(self._namespace.name, object_type, object_name))
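  # Illustrative sketch, not part of the original generator: for the
  # hypothetical namespace 'bookmarks' and method 'getTree', the link above
  # would read:
  #   @see https://developer.chrome.com/extensions/bookmarks#method-getTree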
| bsd-3-clause | -8,642,730,527,378,819,000 | 32.800604 | 80 | 0.622631 | false |
qk4l/Flexget | flexget/tests/test_input_sites.py | 5 | 1359 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import pytest
@pytest.mark.online
class TestInputSites(object):
config = ("""
templates:
global:
headers:
User-Agent: "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 """ +
"""(KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36"
tasks:
test_sceper:
sceper: http://sceper.ws/category/movies/movies-dvd-rip
test_apple_trailers:
apple_trailers:
quality: 480p
genres: ['Action and Adventure']
test_apple_trailers_simple:
apple_trailers: 720p
""")
@pytest.mark.skip(reason='Missing a usable urlrewriter for uploadgig?')
def test_sceper(self, execute_task):
task = execute_task('test_sceper')
assert task.entries, 'no entries created / site may be down'
def test_apple_trailers(self, execute_task, use_vcr):
task = execute_task('test_apple_trailers')
assert task.entries, 'no entries created / site may be down'
def test_apple_trailers_simple(self, execute_task):
task = execute_task('test_apple_trailers_simple')
assert task.entries, 'no entries created / site may be down'
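# Usage sketch, not part of the original module: these tests are marked
# online and touch live sites, so a local run (assuming pytest and the
# Flexget test fixtures are available) would look like:
#
#   pytest flexget/tests/test_input_sites.py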
| mit | -6,331,254,158,386,032,000 | 34.763158 | 87 | 0.626932 | false |
ChanChiChoi/scikit-learn | sklearn/cluster/tests/test_birch.py | 342 | 5603 | """
Tests for the birch clustering algorithm.
"""
from scipy import sparse
import numpy as np
from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.cluster.birch import Birch
from sklearn.cluster.hierarchical import AgglomerativeClustering
from sklearn.datasets import make_blobs
from sklearn.linear_model import ElasticNet
from sklearn.metrics import pairwise_distances_argmin, v_measure_score
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
def test_n_samples_leaves_roots():
# Sanity check for the number of samples in leaves and roots
X, y = make_blobs(n_samples=10)
brc = Birch()
brc.fit(X)
n_samples_root = sum([sc.n_samples_ for sc in brc.root_.subclusters_])
n_samples_leaves = sum([sc.n_samples_ for leaf in brc._get_leaves()
for sc in leaf.subclusters_])
assert_equal(n_samples_leaves, X.shape[0])
assert_equal(n_samples_root, X.shape[0])
def test_partial_fit():
# Test that fit is equivalent to calling partial_fit multiple times
X, y = make_blobs(n_samples=100)
brc = Birch(n_clusters=3)
brc.fit(X)
brc_partial = Birch(n_clusters=None)
brc_partial.partial_fit(X[:50])
brc_partial.partial_fit(X[50:])
assert_array_equal(brc_partial.subcluster_centers_,
brc.subcluster_centers_)
    # Test that the same global labels are obtained after calling partial_fit
    # with None.
brc_partial.set_params(n_clusters=3)
brc_partial.partial_fit(None)
assert_array_equal(brc_partial.subcluster_labels_, brc.subcluster_labels_)
def test_birch_predict():
# Test the predict method predicts the nearest centroid.
rng = np.random.RandomState(0)
X = generate_clustered_data(n_clusters=3, n_features=3,
n_samples_per_cluster=10)
    # 30 = n_clusters * n_samples_per_cluster
shuffle_indices = np.arange(30)
rng.shuffle(shuffle_indices)
X_shuffle = X[shuffle_indices, :]
brc = Birch(n_clusters=4, threshold=1.)
brc.fit(X_shuffle)
centroids = brc.subcluster_centers_
assert_array_equal(brc.labels_, brc.predict(X_shuffle))
nearest_centroid = pairwise_distances_argmin(X_shuffle, centroids)
assert_almost_equal(v_measure_score(nearest_centroid, brc.labels_), 1.0)
def test_n_clusters():
# Test that n_clusters param works properly
X, y = make_blobs(n_samples=100, centers=10)
brc1 = Birch(n_clusters=10)
brc1.fit(X)
assert_greater(len(brc1.subcluster_centers_), 10)
assert_equal(len(np.unique(brc1.labels_)), 10)
    # Test that passing an AgglomerativeClustering instance as n_clusters
    # gives the same results.
gc = AgglomerativeClustering(n_clusters=10)
brc2 = Birch(n_clusters=gc)
brc2.fit(X)
assert_array_equal(brc1.subcluster_labels_, brc2.subcluster_labels_)
assert_array_equal(brc1.labels_, brc2.labels_)
    # Test that an invalid global clustering object raises a ValueError.
clf = ElasticNet()
brc3 = Birch(n_clusters=clf)
assert_raises(ValueError, brc3.fit, X)
    # Test that an overly large threshold, which yields fewer subclusters
    # than n_clusters, raises a warning.
brc4 = Birch(threshold=10000.)
assert_warns(UserWarning, brc4.fit, X)
def test_sparse_X():
# Test that sparse and dense data give same results
X, y = make_blobs(n_samples=100, centers=10)
brc = Birch(n_clusters=10)
brc.fit(X)
csr = sparse.csr_matrix(X)
brc_sparse = Birch(n_clusters=10)
brc_sparse.fit(csr)
assert_array_equal(brc.labels_, brc_sparse.labels_)
assert_array_equal(brc.subcluster_centers_,
brc_sparse.subcluster_centers_)
def check_branching_factor(node, branching_factor):
subclusters = node.subclusters_
assert_greater_equal(branching_factor, len(subclusters))
for cluster in subclusters:
if cluster.child_:
check_branching_factor(cluster.child_, branching_factor)
def test_branching_factor():
    # Test that nodes have at most branching_factor subclusters
X, y = make_blobs()
branching_factor = 9
# Purposefully set a low threshold to maximize the subclusters.
brc = Birch(n_clusters=None, branching_factor=branching_factor,
threshold=0.01)
brc.fit(X)
check_branching_factor(brc.root_, branching_factor)
brc = Birch(n_clusters=3, branching_factor=branching_factor,
threshold=0.01)
brc.fit(X)
check_branching_factor(brc.root_, branching_factor)
# Raises error when branching_factor is set to one.
brc = Birch(n_clusters=None, branching_factor=1, threshold=0.01)
assert_raises(ValueError, brc.fit, X)
def check_threshold(birch_instance, threshold):
"""Use the leaf linked list for traversal"""
current_leaf = birch_instance.dummy_leaf_.next_leaf_
while current_leaf:
subclusters = current_leaf.subclusters_
for sc in subclusters:
assert_greater_equal(threshold, sc.radius)
current_leaf = current_leaf.next_leaf_
def test_threshold():
    # Test that each leaf subcluster's radius does not exceed the threshold
X, y = make_blobs(n_samples=80, centers=4)
brc = Birch(threshold=0.5, n_clusters=None)
brc.fit(X)
check_threshold(brc, 0.5)
brc = Birch(threshold=5.0, n_clusters=None)
brc.fit(X)
check_threshold(brc, 5.)
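# Usage sketch, not part of the original test suite: a minimal direct use of
# the Birch API exercised above (assuming scikit-learn is installed):
#
#   from sklearn.cluster import Birch
#   from sklearn.datasets import make_blobs
#
#   X, _ = make_blobs(n_samples=100, centers=10)
#   brc = Birch(n_clusters=10, threshold=0.5, branching_factor=50)
#   labels = brc.fit_predict(X)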
| bsd-3-clause | 8,295,389,328,305,418,000 | 34.01875 | 78 | 0.693914 | false |