repo_name | path | copies | size | content | license
---|---|---|---|---|---
gchp/django | django/contrib/staticfiles/management/commands/findstatic.py | 463 | 1745 | from __future__ import unicode_literals
import os
from django.contrib.staticfiles import finders
from django.core.management.base import LabelCommand
from django.utils.encoding import force_text
class Command(LabelCommand):
help = "Finds the absolute paths for the given static file(s)."
label = 'static file'
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('--first', action='store_false', dest='all',
default=True,
help="Only return the first match for each static file.")
def handle_label(self, path, **options):
verbosity = options['verbosity']
result = finders.find(path, all=options['all'])
path = force_text(path)
if verbosity >= 2:
searched_locations = ("Looking in the following locations:\n %s" %
"\n ".join(force_text(location)
for location in finders.searched_locations))
else:
searched_locations = ''
if result:
if not isinstance(result, (list, tuple)):
result = [result]
result = (force_text(os.path.realpath(path)) for path in result)
if verbosity >= 1:
file_list = '\n '.join(result)
return ("Found '%s' here:\n %s\n%s" %
(path, file_list, searched_locations))
else:
return '\n'.join(result)
else:
message = ["No matching file found for '%s'." % path]
if verbosity >= 2:
message.append(searched_locations)
if verbosity >= 1:
self.stderr.write('\n'.join(message))
| bsd-3-clause |
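A minimal sketch of exercising the command above; the static file names are
assumed examples, and a configured Django project is required:

    $ python manage.py findstatic css/base.css admin/js/core.js
    $ python manage.py findstatic css/base.css --first

The same lookup is available from Python, mirroring handle_label above:

    >>> from django.contrib.staticfiles import finders
    >>> finders.find('css/base.css', all=True)  # all matching absolute paths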
PhilHarnish/forge | bin/process_clues.py | 1 | 1840 | """Process http://www.otsys.com/clue/ DB for use with python."""
import collections
import os
import sqlite3
import sys
# Add parent directory to path.
sys.path.append(os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'src'))
from data import crossword
from data import data
from puzzle.puzzlepedia import prod_config
prod_config.init()
STOP_WORD = '~' # Appears after z.
MAX_KEYWORDS = 50
MIN_USAGES = 5
VISITED = set()
ALL_SEEN = collections.defaultdict(int)
def _prune_keywords(keywords):
top = sorted(
keywords.items(), key=lambda i: i[1], reverse=True
)[:MAX_KEYWORDS]
results = {}
for keyword, count in top:
ALL_SEEN[keyword] += count
results[keyword] = count
return results
conn = crossword.init('data/crossword.sqlite')
c = conn.cursor()
def _insert(solution, usages, keywords):
try:
crossword.add(c, solution, usages, _prune_keywords(keywords))
except (sqlite3.OperationalError, sqlite3.IntegrityError):
conn.commit()
conn.close()
raise
last_solution = None
keywords = collections.defaultdict(int)
usages = 0
for i, line in enumerate(
data.open_project_path('data/clues.txt', errors='ignore')):
solution, unused_int, unused_year, unused_source, clue = line.lower().split(
None, 4)
if solution > STOP_WORD:
print(line)
break
if solution in VISITED:
print('Skipping %s' % line)
continue
if last_solution and solution != last_solution:
if usages >= MIN_USAGES and keywords:
_insert(last_solution, usages, keywords)
VISITED.add(last_solution)
keywords.clear()
usages = 0
usages += 1
for keyword in crossword.clue_keywords(clue):
keywords[keyword] += 1
last_solution = solution
_insert(last_solution, usages, keywords)
conn.commit()
conn.close()
print(_prune_keywords(ALL_SEEN))
| mit |
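The loop above relies on clues.txt being sorted by solution, so consecutive
rows with the same answer can be aggregated before a single insert. A minimal
sketch of that grouping idea on toy data (rows invented for illustration):

    import collections
    import itertools

    rows = [('ale', 'pub brew'), ('ale', 'amber brew'), ('axe', 'chopping tool')]
    for solution, group in itertools.groupby(rows, key=lambda r: r[0]):
        keywords = collections.defaultdict(int)
        for _, clue in group:
            for word in clue.split():
                keywords[word] += 1
        print(solution, dict(keywords))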
40223108/w18 | static/Brython3.1.1-20150328-091302/Lib/unittest/test/_test_warnings.py | 858 | 2304 | # helper module for test_runner.Test_TextTestRunner.test_warnings
"""
This module has a number of tests that raise different kinds of warnings.
When the tests are run, the warnings are caught and their messages are printed
to stdout. This module also accepts an arg that is then passed to
unittest.main to affect the behavior of warnings.
Test_TextTestRunner.test_warnings executes this script with different
combinations of warnings args and -W flags and checks that the output is correct.
See #10535.
"""
import sys
import unittest
import warnings
def warnfun():
warnings.warn('rw', RuntimeWarning)
class TestWarnings(unittest.TestCase):
# unittest warnings will be printed at most once per type (max one message
# for the fail* methods, and one for the assert* methods)
def test_assert(self):
self.assertEquals(2+2, 4)
self.assertEquals(2*2, 4)
self.assertEquals(2**2, 4)
def test_fail(self):
self.failUnless(1)
self.failUnless(True)
def test_other_unittest(self):
self.assertAlmostEqual(2+2, 4)
self.assertNotAlmostEqual(4+4, 2)
# these warnings are normally silenced, but they are printed in unittest
def test_deprecation(self):
warnings.warn('dw', DeprecationWarning)
warnings.warn('dw', DeprecationWarning)
warnings.warn('dw', DeprecationWarning)
def test_import(self):
warnings.warn('iw', ImportWarning)
warnings.warn('iw', ImportWarning)
warnings.warn('iw', ImportWarning)
# user warnings should always be printed
def test_warning(self):
warnings.warn('uw')
warnings.warn('uw')
warnings.warn('uw')
# these warnings come from the same place; they will be printed
# only once by default or three times if the 'always' filter is used
def test_function(self):
warnfun()
warnfun()
warnfun()
if __name__ == '__main__':
with warnings.catch_warnings(record=True) as ws:
# if an arg is provided pass it to unittest.main as 'warnings'
if len(sys.argv) == 2:
unittest.main(exit=False, warnings=sys.argv.pop())
else:
unittest.main(exit=False)
# print all the warning messages collected
for w in ws:
print(w.message)
| gpl-3.0 |
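The script's __main__ block uses warnings.catch_warnings(record=True) to
collect warnings instead of printing them. A self-contained sketch of that
mechanism (the warning messages are assumed for illustration):

    import warnings

    with warnings.catch_warnings(record=True) as ws:
        warnings.simplefilter('always')  # record every warning, no dedup
        warnings.warn('uw')
        warnings.warn('dw', DeprecationWarning)
    for w in ws:
        print(w.category.__name__, w.message)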
40223142/cda11 | static/Brython3.1.1-20150328-091302/Lib/multiprocessing/dummy/connection.py | 707 | 3049 | #
# Analogue of `multiprocessing.connection` which uses queues instead of sockets
#
# multiprocessing/dummy/connection.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [ 'Client', 'Listener', 'Pipe' ]
from queue import Queue
families = [None]
class Listener(object):
def __init__(self, address=None, family=None, backlog=1):
self._backlog_queue = Queue(backlog)
def accept(self):
return Connection(*self._backlog_queue.get())
def close(self):
self._backlog_queue = None
address = property(lambda self: self._backlog_queue)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def Client(address):
_in, _out = Queue(), Queue()
address.put((_out, _in))
return Connection(_in, _out)
def Pipe(duplex=True):
a, b = Queue(), Queue()
return Connection(a, b), Connection(b, a)
class Connection(object):
def __init__(self, _in, _out):
self._out = _out
self._in = _in
self.send = self.send_bytes = _out.put
self.recv = self.recv_bytes = _in.get
def poll(self, timeout=0.0):
if self._in.qsize() > 0:
return True
if timeout <= 0.0:
return False
self._in.not_empty.acquire()
self._in.not_empty.wait(timeout)
self._in.not_empty.release()
return self._in.qsize() > 0
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
| gpl-3.0 |
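Because send/recv are just Queue.put/Queue.get, both endpoints of this dummy
Pipe work within a single thread. A short usage sketch (payload assumed):

    from multiprocessing.dummy.connection import Pipe

    left, right = Pipe()
    left.send('ping')
    print(right.recv())  # 'ping'
    print(right.poll())  # False: nothing further is pending for `right`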
Sumith1896/sympy | sympy/utilities/runtests.py | 4 | 78928 | """
This is our testing framework.
Goals:
* it should be compatible with py.test and operate very similarly
(or identically)
* doesn't require any external dependencies
* preferably all the functionality should be in this file only
* no magic, just import the test file and execute the test functions, that's it
* portable
"""
from __future__ import print_function, division
import os
import sys
import platform
import inspect
import traceback
import pdb
import re
import linecache
from fnmatch import fnmatch
from timeit import default_timer as clock
import doctest as pdoctest # avoid clashing with our doctest() function
from doctest import DocTestFinder, DocTestRunner
import random
import subprocess
import signal
import stat
from inspect import isgeneratorfunction
from sympy.core.cache import clear_cache
from sympy.core.compatibility import exec_, PY3, string_types, range
from sympy.utilities.misc import find_executable
from sympy.external import import_module
from sympy.utilities.exceptions import SymPyDeprecationWarning
IS_WINDOWS = (os.name == 'nt')
class Skipped(Exception):
pass
import __future__
# add more flags ??
future_flags = __future__.division.compiler_flag
def _indent(s, indent=4):
"""
Add the given number of space characters to the beginning of
every non-blank line in ``s``, and return the result.
If the string ``s`` is Unicode, it is encoded using the stdout
encoding and the ``backslashreplace`` error handler.
"""
# After a 2to3 run the below code is bogus, so wrap it with a version check
if not PY3:
if isinstance(s, unicode):
s = s.encode(pdoctest._encoding, 'backslashreplace')
# This regexp matches the start of non-blank lines:
return re.sub('(?m)^(?!$)', indent*' ', s)
pdoctest._indent = _indent
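# A quick illustration of the '(?m)^(?!$)' pattern above, which matches the
# start of every non-blank line (toy string assumed):
#
#     >>> re.sub('(?m)^(?!$)', 4*' ', 'a\n\nb')
#     '    a\n\n    b'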
# override the reporter to support Windows and Python 3
def _report_failure(self, out, test, example, got):
"""
Report that the given example failed.
"""
s = self._checker.output_difference(example, got, self.optionflags)
s = s.encode('raw_unicode_escape').decode('utf8', 'ignore')
out(self._failure_header(test, example) + s)
if PY3 and IS_WINDOWS:
DocTestRunner.report_failure = _report_failure
def convert_to_native_paths(lst):
"""
Converts a list of '/' separated paths into a list of
native (os.sep separated) paths and converts to lowercase
if the system is case insensitive.
"""
newlst = []
for i, rv in enumerate(lst):
rv = os.path.join(*rv.split("/"))
# on windows the slash after the colon is dropped
if sys.platform == "win32":
pos = rv.find(':')
if pos != -1:
if rv[pos + 1] != '\\':
rv = rv[:pos + 1] + '\\' + rv[pos + 1:]
newlst.append(sys_normcase(rv))
return newlst
def get_sympy_dir():
"""
Returns the root sympy directory and set the global value
indicating whether the system is case sensitive or not.
"""
global sys_case_insensitive
this_file = os.path.abspath(__file__)
sympy_dir = os.path.join(os.path.dirname(this_file), "..", "..")
sympy_dir = os.path.normpath(sympy_dir)
sys_case_insensitive = (os.path.isdir(sympy_dir) and
os.path.isdir(sympy_dir.lower()) and
os.path.isdir(sympy_dir.upper()))
return sys_normcase(sympy_dir)
def sys_normcase(f):
if sys_case_insensitive: # global defined after call to get_sympy_dir()
return f.lower()
return f
def setup_pprint():
from sympy import pprint_use_unicode, init_printing
# force pprint to be in ascii mode in doctests
pprint_use_unicode(False)
# hook our nice, hash-stable strprinter
init_printing(pretty_print=False)
def run_in_subprocess_with_hash_randomization(function, function_args=(),
function_kwargs={}, command=sys.executable,
module='sympy.utilities.runtests', force=False):
"""
Run a function in a Python subprocess with hash randomization enabled.
If hash randomization is not supported by the version of Python given, it
returns False. Otherwise, it returns the exit value of the command. The
function is passed to sys.exit(), so the return value of the function will
be the return value.
The environment variable PYTHONHASHSEED is used to seed Python's hash
randomization. If it is set, this function will return False, because
starting a new subprocess is unnecessary in that case. If it is not set,
one is set at random, and the tests are run. Note that if this
environment variable is set when Python starts, hash randomization is
automatically enabled. To force a subprocess to be created even if
PYTHONHASHSEED is set, pass ``force=True``. This flag will not force a
subprocess in Python versions that do not support hash randomization (see
below), because those versions of Python do not support the ``-R`` flag.
``function`` should be a string name of a function that is importable from
the module ``module``, like "_test". The default for ``module`` is
"sympy.utilities.runtests". ``function_args`` and ``function_kwargs``
should be a repr-able tuple and dict, respectively. The default Python
command is sys.executable, which is the currently running Python command.
This function is necessary because the seed for hash randomization must be
set by the environment variable before Python starts. Hence, in order to
use a predetermined seed for tests, we must start Python in a separate
subprocess.
Hash randomization was added in the minor Python versions 2.6.8, 2.7.3,
3.1.5, and 3.2.3, and is enabled by default in all Python versions after
and including 3.3.0.
Examples
========
>>> from sympy.utilities.runtests import (
... run_in_subprocess_with_hash_randomization)
>>> # run the core tests in verbose mode
>>> run_in_subprocess_with_hash_randomization("_test",
... function_args=("core",),
... function_kwargs={'verbose': True}) # doctest: +SKIP
# Will return 0 if sys.executable supports hash randomization and tests
# pass, 1 if they fail, and False if it does not support hash
# randomization.
"""
# Note, we must return False everywhere, not None, as subprocess.call will
# sometimes return None.
# First check if the Python version supports hash randomization
# If it doesn't have this support, it won't recognize the -R flag
p = subprocess.Popen([command, "-RV"], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.communicate()
if p.returncode != 0:
return False
hash_seed = os.getenv("PYTHONHASHSEED")
if not hash_seed:
os.environ["PYTHONHASHSEED"] = str(random.randrange(2**32))
else:
if not force:
return False
# Now run the command
commandstring = ("import sys; from %s import %s;sys.exit(%s(*%s, **%s))" %
(module, function, function, repr(function_args),
repr(function_kwargs)))
try:
p = subprocess.Popen([command, "-R", "-c", commandstring])
p.communicate()
except KeyboardInterrupt:
p.wait()
finally:
# Put the environment variable back, so that it reads correctly for
# the current Python process.
if hash_seed is None:
del os.environ["PYTHONHASHSEED"]
else:
os.environ["PYTHONHASHSEED"] = hash_seed
return p.returncode
def run_all_tests(test_args=(), test_kwargs={}, doctest_args=(),
doctest_kwargs={}, examples_args=(), examples_kwargs={'quiet': True}):
"""
Run all tests.
Right now, this runs the regular tests (bin/test), the doctests
(bin/doctest), the examples (examples/all.py), and the sage tests (see
sympy/external/tests/test_sage.py).
This is what ``setup.py test`` uses.
You can pass arguments and keyword arguments to the test functions that
support them (for now, test, doctest, and the examples). See the
docstrings of those functions for a description of the available options.
For example, to run the solvers tests with colors turned off:
>>> from sympy.utilities.runtests import run_all_tests
>>> run_all_tests(test_args=("solvers",),
... test_kwargs={"colors:False"}) # doctest: +SKIP
"""
tests_successful = True
try:
# Regular tests
if not test(*test_args, **test_kwargs):
# some regular test fails, so set the tests_successful
# flag to false and continue running the doctests
tests_successful = False
# Doctests
print()
if not doctest(*doctest_args, **doctest_kwargs):
tests_successful = False
# Examples
print()
sys.path.append("examples")
from all import run_examples # examples/all.py
if not run_examples(*examples_args, **examples_kwargs):
tests_successful = False
# Sage tests
if not (sys.platform == "win32" or PY3):
# run Sage tests; Sage currently doesn't support Windows or Python 3
dev_null = open(os.devnull, 'w')
if subprocess.call("sage -v", shell=True, stdout=dev_null,
stderr=dev_null) == 0:
if subprocess.call("sage -python bin/test "
"sympy/external/tests/test_sage.py", shell=True) != 0:
tests_successful = False
if tests_successful:
return
else:
# Return nonzero exit code
sys.exit(1)
except KeyboardInterrupt:
print()
print("DO *NOT* COMMIT!")
sys.exit(1)
def test(*paths, **kwargs):
"""
Run tests in the specified test_*.py files.
Tests in a particular test_*.py file are run if any of the given strings
in ``paths`` matches a part of the test file's path. If ``paths=[]``,
tests in all test_*.py files are run.
Notes:
- If sort=False, tests are run in random order (not default).
- Paths can be entered in native system format or in unix,
forward-slash format.
- Files that are on the blacklist can be tested by providing
their path; they are only excluded if no paths are given.
**Explanation of test results**
====== ===============================================================
Output Meaning
====== ===============================================================
. passed
F failed
X XPassed (expected to fail but passed)
f XFAILed (expected to fail and indeed failed)
s skipped
w slow
T timeout (e.g., when ``--timeout`` is used)
K KeyboardInterrupt (when running the slow tests with ``--slow``,
you can interrupt one of them without killing the test runner)
====== ===============================================================
Colors have no additional meaning and are used just to facilitate
interpreting the output.
Examples
========
>>> import sympy
Run all tests:
>>> sympy.test() # doctest: +SKIP
Run one file:
>>> sympy.test("sympy/core/tests/test_basic.py") # doctest: +SKIP
>>> sympy.test("_basic") # doctest: +SKIP
Run all tests in sympy/functions/ and some particular file:
>>> sympy.test("sympy/core/tests/test_basic.py",
... "sympy/functions") # doctest: +SKIP
Run all tests in sympy/core and sympy/utilities:
>>> sympy.test("/core", "/util") # doctest: +SKIP
Run specific test from a file:
>>> sympy.test("sympy/core/tests/test_basic.py",
... kw="test_equality") # doctest: +SKIP
Run specific test from any file:
>>> sympy.test(kw="subs") # doctest: +SKIP
Run the tests with verbose mode on:
>>> sympy.test(verbose=True) # doctest: +SKIP
Don't sort the test output:
>>> sympy.test(sort=False) # doctest: +SKIP
Turn on post-mortem pdb:
>>> sympy.test(pdb=True) # doctest: +SKIP
Turn off colors:
>>> sympy.test(colors=False) # doctest: +SKIP
Force colors, even when the output is not to a terminal (this is useful,
e.g., if you are piping to ``less -r`` and you still want colors)
>>> sympy.test(force_colors=True) # doctest: +SKIP
The traceback verboseness can be set to "short" or "no" (default is
"short")
>>> sympy.test(tb='no') # doctest: +SKIP
The ``split`` option can be passed to split the test run into parts. The
split currently only splits the test files, though this may change in the
future. ``split`` should be a string of the form 'a/b', which will run
part ``a`` of ``b``. For instance, to run the first half of the test suite:
>>> sympy.test(split='1/2') # doctest: +SKIP
You can disable running the tests in a separate subprocess using
``subprocess=False``. This is done to support seeding hash randomization,
which is enabled by default in the Python versions where it is supported.
If subprocess=False, hash randomization is enabled/disabled according to
whether it has been enabled or not in the calling Python process.
However, even if it is enabled, the seed cannot be printed unless it is
called from a new Python process.
Hash randomization was added in the minor Python versions 2.6.8, 2.7.3,
3.1.5, and 3.2.3, and is enabled by default in all Python versions after
and including 3.3.0.
If hash randomization is not supported ``subprocess=False`` is used
automatically.
>>> sympy.test(subprocess=False) # doctest: +SKIP
To set the hash randomization seed, set the environment variable
``PYTHONHASHSEED`` before running the tests. This can be done from within
Python using
>>> import os
>>> os.environ['PYTHONHASHSEED'] = '42' # doctest: +SKIP
Or from the command line using
$ PYTHONHASHSEED=42 ./bin/test
If the seed is not set, a random seed will be chosen.
Note that to reproduce the same hash values, you must use both the same seed
as well as the same architecture (32-bit vs. 64-bit).
"""
subprocess = kwargs.pop("subprocess", True)
rerun = kwargs.pop("rerun", 0)
# count up from 0, do not print 0
print_counter = lambda i : (print("rerun %d" % (rerun-i))
if rerun-i else None)
if subprocess:
# loop backwards so last i is 0
for i in range(rerun, -1, -1):
print_counter(i)
ret = run_in_subprocess_with_hash_randomization("_test",
function_args=paths, function_kwargs=kwargs)
if ret is False:
break
val = not bool(ret)
# exit on the first failure or if done
if not val or i == 0:
return val
# rerun even if hash randomization is not supported
for i in range(rerun, -1, -1):
print_counter(i)
val = not bool(_test(*paths, **kwargs))
if not val or i == 0:
return val
def _test(*paths, **kwargs):
"""
Internal function that actually runs the tests.
All keyword arguments from ``test()`` are passed to this function except for
``subprocess``.
Returns 0 if tests passed and 1 if they failed. See the docstring of
``test()`` for more information.
"""
verbose = kwargs.get("verbose", False)
tb = kwargs.get("tb", "short")
kw = kwargs.get("kw", None) or ()
# ensure that kw is a tuple
if isinstance(kw, str):
kw = (kw, )
post_mortem = kwargs.get("pdb", False)
colors = kwargs.get("colors", True)
force_colors = kwargs.get("force_colors", False)
sort = kwargs.get("sort", True)
seed = kwargs.get("seed", None)
if seed is None:
seed = random.randrange(100000000)
timeout = kwargs.get("timeout", False)
slow = kwargs.get("slow", False)
enhance_asserts = kwargs.get("enhance_asserts", False)
split = kwargs.get('split', None)
blacklist = kwargs.get('blacklist', [])
blacklist = convert_to_native_paths(blacklist)
r = PyTestReporter(verbose=verbose, tb=tb, colors=colors,
force_colors=force_colors, split=split)
t = SymPyTests(r, kw, post_mortem, seed)
# Disable warnings for external modules
import sympy.external
sympy.external.importtools.WARN_OLD_VERSION = False
sympy.external.importtools.WARN_NOT_INSTALLED = False
# Show deprecation warnings
import warnings
warnings.simplefilter("error", SymPyDeprecationWarning)
test_files = t.get_test_files('sympy')
not_blacklisted = [f for f in test_files
if not any(b in f for b in blacklist)]
if len(paths) == 0:
matched = not_blacklisted
else:
paths = convert_to_native_paths(paths)
matched = []
for f in not_blacklisted:
basename = os.path.basename(f)
for p in paths:
if p in f or fnmatch(basename, p):
matched.append(f)
break
if split:
matched = split_list(matched, split)
t._testfiles.extend(matched)
return int(not t.test(sort=sort, timeout=timeout,
slow=slow, enhance_asserts=enhance_asserts))
def doctest(*paths, **kwargs):
"""
Runs doctests in all \*.py files in the sympy directory which match
any of the given strings in ``paths`` or all tests if paths=[].
Notes:
- Paths can be entered in native system format or in unix,
forward-slash format.
- Files that are on the blacklist can be tested by providing
their path; they are only excluded if no paths are given.
Examples
========
>>> import sympy
Run all tests:
>>> sympy.doctest() # doctest: +SKIP
Run one file:
>>> sympy.doctest("sympy/core/basic.py") # doctest: +SKIP
>>> sympy.doctest("polynomial.rst") # doctest: +SKIP
Run all tests in sympy/functions/ and some particular file:
>>> sympy.doctest("/functions", "basic.py") # doctest: +SKIP
Run any file having polynomial in its name, doc/src/modules/polynomial.rst,
sympy/functions/special/polynomials.py, and sympy/polys/polynomial.py:
>>> sympy.doctest("polynomial") # doctest: +SKIP
The ``split`` option can be passed to split the test run into parts. The
split currently only splits the test files, though this may change in the
future. ``split`` should be a string of the form 'a/b', which will run
part ``a`` of ``b``. Note that the regular doctests and the Sphinx
doctests are split independently. For instance, to run the first half of
the test suite:
>>> sympy.doctest(split='1/2') # doctest: +SKIP
The ``subprocess`` and ``verbose`` options are the same as with the function
``test()``. See the docstring of that function for more information.
"""
subprocess = kwargs.pop("subprocess", True)
rerun = kwargs.pop("rerun", 0)
# count up from 0, do not print 0
print_counter = lambda i : (print("rerun %d" % (rerun-i))
if rerun-i else None)
if subprocess:
# loop backwards so last i is 0
for i in range(rerun, -1, -1):
print_counter(i)
ret = run_in_subprocess_with_hash_randomization("_doctest",
function_args=paths, function_kwargs=kwargs)
if ret is False:
break
val = not bool(ret)
# exit on the first failure or if done
if not val or i == 0:
return val
# rerun even if hash randomization is not supported
for i in range(rerun, -1, -1):
print_counter(i)
val = not bool(_doctest(*paths, **kwargs))
if not val or i == 0:
return val
def _doctest(*paths, **kwargs):
"""
Internal function that actually runs the doctests.
All keyword arguments from ``doctest()`` are passed to this function
except for ``subprocess``.
Returns 0 if tests passed and 1 if they failed. See the docstrings of
``doctest()`` and ``test()`` for more information.
"""
normal = kwargs.get("normal", False)
verbose = kwargs.get("verbose", False)
blacklist = kwargs.get("blacklist", [])
split = kwargs.get('split', None)
blacklist.extend([
"doc/src/modules/plotting.rst", # generates live plots
"sympy/utilities/compilef.py", # needs tcc
"sympy/physics/gaussopt.py", # raises deprecation warning
])
if import_module('numpy') is None:
blacklist.extend([
"sympy/plotting/experimental_lambdify.py",
"sympy/plotting/plot_implicit.py",
"examples/advanced/autowrap_integrators.py",
"examples/advanced/autowrap_ufuncify.py",
"examples/intermediate/sample.py",
"examples/intermediate/mplot2d.py",
"examples/intermediate/mplot3d.py",
"doc/src/modules/numeric-computation.rst"
])
else:
if import_module('matplotlib') is None:
blacklist.extend([
"examples/intermediate/mplot2d.py",
"examples/intermediate/mplot3d.py"
])
else:
# don't display matplotlib windows
from sympy.plotting.plot import unset_show
unset_show()
if import_module('pyglet') is None:
blacklist.extend(["sympy/plotting/pygletplot"])
if import_module('theano') is None:
blacklist.extend(["doc/src/modules/numeric-computation.rst"])
# disabled because of doctest failures in asmeurer's bot
blacklist.extend([
"sympy/utilities/autowrap.py",
"examples/advanced/autowrap_integrators.py",
"examples/advanced/autowrap_ufuncify.py"
])
# blacklist these modules until issue 4840 is resolved
blacklist.extend([
"sympy/conftest.py",
"sympy/utilities/benchmarking.py"
])
blacklist = convert_to_native_paths(blacklist)
# Disable warnings for external modules
import sympy.external
sympy.external.importtools.WARN_OLD_VERSION = False
sympy.external.importtools.WARN_NOT_INSTALLED = False
# Show deprecation warnings
import warnings
warnings.simplefilter("error", SymPyDeprecationWarning)
r = PyTestReporter(verbose, split=split)
t = SymPyDocTests(r, normal)
test_files = t.get_test_files('sympy')
test_files.extend(t.get_test_files('examples', init_only=False))
not_blacklisted = [f for f in test_files
if not any(b in f for b in blacklist)]
if len(paths) == 0:
matched = not_blacklisted
else:
# take only what was requested...but not blacklisted items
# and allow for partial match anywhere or fnmatch of name
paths = convert_to_native_paths(paths)
matched = []
for f in not_blacklisted:
basename = os.path.basename(f)
for p in paths:
if p in f or fnmatch(basename, p):
matched.append(f)
break
if split:
matched = split_list(matched, split)
t._testfiles.extend(matched)
# run the tests and record the result for this *py portion of the tests
if t._testfiles:
failed = not t.test()
else:
failed = False
# N.B.
# --------------------------------------------------------------------
# Here we test *.rst files at or below doc/src. Code from these must
# be self supporting in terms of imports since there is no importing
# of necessary modules by doctest.testfile. If you try to pass *.py
# files through this they might fail because they will lack the needed
# imports and smarter parsing that can be done with source code.
#
test_files = t.get_test_files('doc/src', '*.rst', init_only=False)
test_files.sort()
not_blacklisted = [f for f in test_files
if not any(b in f for b in blacklist)]
if len(paths) == 0:
matched = not_blacklisted
else:
# Take only what was requested as long as it's not on the blacklist.
# Paths were already made native in *py tests so don't repeat here.
# There's no chance of having a *py file slip through since we
# only have *rst files in test_files.
matched = []
for f in not_blacklisted:
basename = os.path.basename(f)
for p in paths:
if p in f or fnmatch(basename, p):
matched.append(f)
break
if split:
matched = split_list(matched, split)
setup_pprint()
first_report = True
for rst_file in matched:
if not os.path.isfile(rst_file):
continue
old_displayhook = sys.displayhook
try:
out = sympytestfile(
rst_file, module_relative=False, encoding='utf-8',
optionflags=pdoctest.ELLIPSIS | pdoctest.NORMALIZE_WHITESPACE |
pdoctest.IGNORE_EXCEPTION_DETAIL)
finally:
# make sure we return to the original displayhook in case some
# doctest has changed that
sys.displayhook = old_displayhook
rstfailed, tested = out
if tested:
failed = rstfailed or failed
if first_report:
first_report = False
msg = 'rst doctests start'
if not t._testfiles:
r.start(msg=msg)
else:
r.write_center(msg)
print()
# use everything past the first 'sympy' as the id
file_id = rst_file[rst_file.find('sympy') + len('sympy') + 1:]
print(file_id, end=" ")
# get at least the name out so it is known who is being tested
wid = r.terminal_width - len(file_id) - 1 # update width
test_file = '[%s]' % (tested)
report = '[%s]' % (rstfailed or 'OK')
print(''.join(
[test_file, ' '*(wid - len(test_file) - len(report)), report])
)
# the doctests for *py will have printed this message already if there was
# a failure, so now only print it if there was intervening reporting by
# testing the *rst as evidenced by first_report no longer being True.
if not first_report and failed:
print()
print("DO *NOT* COMMIT!")
return int(failed)
sp = re.compile(r'([0-9]+)/([1-9][0-9]*)')
def split_list(l, split):
"""
Splits a list into part a of b
split should be a string of the form 'a/b'. For instance, '1/3' would
give the first of three parts.
If the length of the list is not divisible by the number of splits, the
last split will have more items.
>>> from sympy.utilities.runtests import split_list
>>> a = list(range(10))
>>> split_list(a, '1/3')
[0, 1, 2]
>>> split_list(a, '2/3')
[3, 4, 5]
>>> split_list(a, '3/3')
[6, 7, 8, 9]
"""
m = sp.match(split)
if not m:
raise ValueError("split must be a string of the form a/b where a and b are ints")
i, t = map(int, m.groups())
return l[(i - 1)*len(l)//t:i*len(l)//t]
from collections import namedtuple
SymPyTestResults = namedtuple('TestResults', 'failed attempted')
def sympytestfile(filename, module_relative=True, name=None, package=None,
globs=None, verbose=None, report=True, optionflags=0,
extraglobs=None, raise_on_error=False,
parser=pdoctest.DocTestParser(), encoding=None):
"""
Test examples in the given file. Return (#failures, #tests).
Optional keyword arg ``module_relative`` specifies how filenames
should be interpreted:
- If ``module_relative`` is True (the default), then ``filename``
specifies a module-relative path. By default, this path is
relative to the calling module's directory; but if the
``package`` argument is specified, then it is relative to that
package. To ensure os-independence, ``filename`` should use
"/" characters to separate path segments, and should not
be an absolute path (i.e., it may not begin with "/").
- If ``module_relative`` is False, then ``filename`` specifies an
os-specific path. The path may be absolute or relative (to
the current working directory).
Optional keyword arg ``name`` gives the name of the test; by default
use the file's basename.
Optional keyword argument ``package`` is a Python package or the
name of a Python package whose directory should be used as the
base directory for a module relative filename. If no package is
specified, then the calling module's directory is used as the base
directory for module relative filenames. It is an error to
specify ``package`` if ``module_relative`` is False.
Optional keyword arg ``globs`` gives a dict to be used as the globals
when executing examples; by default, use {}. A copy of this dict
is actually used for each docstring, so that each docstring's
examples start with a clean slate.
Optional keyword arg ``extraglobs`` gives a dictionary that should be
merged into the globals that are used to execute examples. By
default, no extra globals are used.
Optional keyword arg ``verbose`` prints lots of stuff if true, prints
only failures if false; by default, it's true iff "-v" is in sys.argv.
Optional keyword arg ``report`` prints a summary at the end when true,
else prints nothing at the end. In verbose mode, the summary is
detailed, else very brief (in fact, empty if all tests passed).
Optional keyword arg ``optionflags`` or's together module constants,
and defaults to 0. Possible values (see the docs for details):
- DONT_ACCEPT_TRUE_FOR_1
- DONT_ACCEPT_BLANKLINE
- NORMALIZE_WHITESPACE
- ELLIPSIS
- SKIP
- IGNORE_EXCEPTION_DETAIL
- REPORT_UDIFF
- REPORT_CDIFF
- REPORT_NDIFF
- REPORT_ONLY_FIRST_FAILURE
Optional keyword arg ``raise_on_error`` raises an exception on the
first unexpected exception or failure. This allows failures to be
post-mortem debugged.
Optional keyword arg ``parser`` specifies a DocTestParser (or
subclass) that should be used to extract tests from the files.
Optional keyword arg ``encoding`` specifies an encoding that should
be used to convert the file to unicode.
Advanced tomfoolery: testmod runs methods of a local instance of
class doctest.Tester, then merges the results into (or creates)
global Tester instance doctest.master. Methods of doctest.master
can be called directly too, if you want to do something unusual.
Passing report=0 to testmod is especially useful then, to delay
displaying a summary. Invoke doctest.master.summarize(verbose)
when you're done fiddling.
"""
if package and not module_relative:
raise ValueError("Package may only be specified for module-"
"relative paths.")
# Relativize the path
if not PY3:
text, filename = pdoctest._load_testfile(
filename, package, module_relative)
if encoding is not None:
text = text.decode(encoding)
else:
text, filename = pdoctest._load_testfile(
filename, package, module_relative, encoding)
# If no name was given, then use the file's name.
if name is None:
name = os.path.basename(filename)
# Assemble the globals.
if globs is None:
globs = {}
else:
globs = globs.copy()
if extraglobs is not None:
globs.update(extraglobs)
if '__name__' not in globs:
globs['__name__'] = '__main__'
if raise_on_error:
runner = pdoctest.DebugRunner(verbose=verbose, optionflags=optionflags)
else:
runner = SymPyDocTestRunner(verbose=verbose, optionflags=optionflags)
runner._checker = SymPyOutputChecker()
# Read the file, convert it to a test, and run it.
test = parser.get_doctest(text, globs, name, filename, 0)
runner.run(test, compileflags=future_flags)
if report:
runner.summarize()
if pdoctest.master is None:
pdoctest.master = runner
else:
pdoctest.master.merge(runner)
return SymPyTestResults(runner.failures, runner.tries)
class SymPyTests(object):
def __init__(self, reporter, kw="", post_mortem=False,
seed=None):
self._post_mortem = post_mortem
self._kw = kw
self._count = 0
self._root_dir = sympy_dir
self._reporter = reporter
self._reporter.root_dir(self._root_dir)
self._testfiles = []
self._seed = seed if seed is not None else random.random()
def test(self, sort=False, timeout=False, slow=False, enhance_asserts=False):
"""
Runs the tests returning True if all tests pass, otherwise False.
If sort=False run tests in random order.
"""
if sort:
self._testfiles.sort()
else:
from random import shuffle
random.seed(self._seed)
shuffle(self._testfiles)
self._reporter.start(self._seed)
for f in self._testfiles:
try:
self.test_file(f, sort, timeout, slow, enhance_asserts)
except KeyboardInterrupt:
print(" interrupted by user")
self._reporter.finish()
raise
return self._reporter.finish()
def _enhance_asserts(self, source):
from ast import (NodeTransformer, Compare, Name, Store, Load, Tuple,
Assign, BinOp, Str, Mod, Assert, parse, fix_missing_locations)
ops = {"Eq": '==', "NotEq": '!=', "Lt": '<', "LtE": '<=',
"Gt": '>', "GtE": '>=', "Is": 'is', "IsNot": 'is not',
"In": 'in', "NotIn": 'not in'}
class Transform(NodeTransformer):
def visit_Assert(self, stmt):
if isinstance(stmt.test, Compare):
compare = stmt.test
values = [compare.left] + compare.comparators
names = [ "_%s" % i for i, _ in enumerate(values) ]
names_store = [ Name(n, Store()) for n in names ]
names_load = [ Name(n, Load()) for n in names ]
target = Tuple(names_store, Store())
value = Tuple(values, Load())
assign = Assign([target], value)
new_compare = Compare(names_load[0], compare.ops, names_load[1:])
msg_format = "\n%s " + "\n%s ".join([ ops[op.__class__.__name__] for op in compare.ops ]) + "\n%s"
msg = BinOp(Str(msg_format), Mod(), Tuple(names_load, Load()))
test = Assert(new_compare, msg, lineno=stmt.lineno, col_offset=stmt.col_offset)
return [assign, test]
else:
return stmt
tree = parse(source)
new_tree = Transform().visit(tree)
return fix_missing_locations(new_tree)
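    # A sketch of the rewrite performed above (toy assert; the _0/_1 names
    # match those generated by Transform):
    #
    #     assert x == y
    #
    # becomes, roughly:
    #
    #     (_0, _1) = (x, y)
    #     assert _0 == _1, '\n%s ==\n%s' % (_0, _1)
    #
    # so a failing assertion reports the two compared values.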
def test_file(self, filename, sort=True, timeout=False, slow=False, enhance_asserts=False):
funcs = []
try:
gl = {'__file__': filename}
try:
if PY3:
open_file = lambda: open(filename, encoding="utf8")
else:
open_file = lambda: open(filename)
with open_file() as f:
source = f.read()
if self._kw:
for l in source.splitlines():
if l.lstrip().startswith('def '):
if any(l.find(k) != -1 for k in self._kw):
break
else:
return
if enhance_asserts:
try:
source = self._enhance_asserts(source)
except ImportError:
pass
code = compile(source, filename, "exec")
exec_(code, gl)
except (SystemExit, KeyboardInterrupt):
raise
except ImportError:
self._reporter.import_error(filename, sys.exc_info())
return
clear_cache()
self._count += 1
random.seed(self._seed)
pytestfile = ""
if "XFAIL" in gl:
pytestfile = inspect.getsourcefile(gl["XFAIL"])
pytestfile2 = ""
if "slow" in gl:
pytestfile2 = inspect.getsourcefile(gl["slow"])
disabled = gl.get("disabled", False)
if not disabled:
# we need to filter only those functions that begin with 'test_'
# that are defined in the testing file or in the file where the
# XFAIL decorator is defined
funcs = [gl[f] for f in gl.keys() if f.startswith("test_") and
(inspect.isfunction(gl[f]) or inspect.ismethod(gl[f])) and
(inspect.getsourcefile(gl[f]) == filename or
inspect.getsourcefile(gl[f]) == pytestfile or
inspect.getsourcefile(gl[f]) == pytestfile2)]
if slow:
funcs = [f for f in funcs if getattr(f, '_slow', False)]
# Sorting of XFAILed functions isn't fixed yet :-(
funcs.sort(key=lambda x: inspect.getsourcelines(x)[1])
i = 0
while i < len(funcs):
if isgeneratorfunction(funcs[i]):
# some tests can be generators, that return the actual
# test functions. We unpack it below:
f = funcs.pop(i)
for fg in f():
func = fg[0]
args = fg[1:]
fgw = lambda: func(*args)
funcs.insert(i, fgw)
i += 1
else:
i += 1
# drop functions that are not selected with the keyword expression:
funcs = [x for x in funcs if self.matches(x)]
if not funcs:
return
except Exception:
self._reporter.entering_filename(filename, len(funcs))
raise
self._reporter.entering_filename(filename, len(funcs))
if not sort:
random.shuffle(funcs)
for f in funcs:
self._reporter.entering_test(f)
try:
if getattr(f, '_slow', False) and not slow:
raise Skipped("Slow")
if timeout:
self._timeout(f, timeout)
else:
random.seed(self._seed)
f()
except KeyboardInterrupt:
if getattr(f, '_slow', False):
self._reporter.test_skip("KeyboardInterrupt")
else:
raise
except Exception:
if timeout:
signal.alarm(0) # Disable the alarm. It could not be handled before.
t, v, tr = sys.exc_info()
if t is AssertionError:
self._reporter.test_fail((t, v, tr))
if self._post_mortem:
pdb.post_mortem(tr)
elif t.__name__ == "Skipped":
self._reporter.test_skip(v)
elif t.__name__ == "XFail":
self._reporter.test_xfail()
elif t.__name__ == "XPass":
self._reporter.test_xpass(v)
else:
self._reporter.test_exception((t, v, tr))
if self._post_mortem:
pdb.post_mortem(tr)
else:
self._reporter.test_pass()
self._reporter.leaving_filename()
def _timeout(self, function, timeout):
def callback(x, y):
signal.alarm(0)
raise Skipped("Timeout")
signal.signal(signal.SIGALRM, callback)
signal.alarm(timeout) # Set an alarm with a given timeout
function()
signal.alarm(0) # Disable the alarm
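    # A standalone sketch of this SIGALRM pattern (POSIX only; the sleep is
    # an assumed stand-in for a slow test):
    #
    #     import signal, time
    #     def handler(signum, frame):
    #         raise Exception("Timeout")
    #     signal.signal(signal.SIGALRM, handler)
    #     signal.alarm(1)          # schedule SIGALRM in one second
    #     try:
    #         time.sleep(5)        # interrupted by the handler after ~1s
    #     finally:
    #         signal.alarm(0)      # always cancel a pending alarm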
def matches(self, x):
"""
Does the keyword expression self._kw match "x"? Returns True/False.
Always returns True if self._kw is "".
"""
if not self._kw:
return True
for kw in self._kw:
if x.__name__.find(kw) != -1:
return True
return False
def get_test_files(self, dir, pat='test_*.py'):
"""
Returns the list of test_*.py (default) files at or below directory
``dir`` relative to the sympy home directory.
"""
dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0])
g = []
for path, folders, files in os.walk(dir):
g.extend([os.path.join(path, f) for f in files if fnmatch(f, pat)])
return sorted([sys_normcase(gi) for gi in g])
class SymPyDocTests(object):
def __init__(self, reporter, normal):
self._count = 0
self._root_dir = sympy_dir
self._reporter = reporter
self._reporter.root_dir(self._root_dir)
self._normal = normal
self._testfiles = []
def test(self):
"""
Runs the tests and returns True if all tests pass, otherwise False.
"""
self._reporter.start()
for f in self._testfiles:
try:
self.test_file(f)
except KeyboardInterrupt:
print(" interrupted by user")
self._reporter.finish()
raise
return self._reporter.finish()
def test_file(self, filename):
clear_cache()
from sympy.core.compatibility import StringIO
rel_name = filename[len(self._root_dir) + 1:]
dirname, file = os.path.split(filename)
module = rel_name.replace(os.sep, '.')[:-3]
if rel_name.startswith("examples"):
# Examples files do not have __init__.py files,
# So we have to temporarily extend sys.path to import them
sys.path.insert(0, dirname)
module = file[:-3] # remove ".py"
setup_pprint()
try:
module = pdoctest._normalize_module(module)
tests = SymPyDocTestFinder().find(module)
except (SystemExit, KeyboardInterrupt):
raise
except ImportError:
self._reporter.import_error(filename, sys.exc_info())
return
finally:
if rel_name.startswith("examples"):
del sys.path[0]
tests = [test for test in tests if len(test.examples) > 0]
# By default tests are sorted alphabetically by function name.
# We sort by line number so one can edit the file sequentially from
# bottom to top. However, if there are decorated functions, their line
# numbers will be too large and for now one must just search for these
# by text and function name.
tests.sort(key=lambda x: -x.lineno)
if not tests:
return
self._reporter.entering_filename(filename, len(tests))
for test in tests:
assert len(test.examples) != 0
# check if there are external dependencies which need to be met
if '_doctest_depends_on' in test.globs:
if not self._process_dependencies(test.globs['_doctest_depends_on']):
self._reporter.test_skip()
continue
runner = SymPyDocTestRunner(optionflags=pdoctest.ELLIPSIS |
pdoctest.NORMALIZE_WHITESPACE |
pdoctest.IGNORE_EXCEPTION_DETAIL)
runner._checker = SymPyOutputChecker()
old = sys.stdout
new = StringIO()
sys.stdout = new
# If the testing is normal, the doctests get importing magic to
# provide the global namespace. If not normal (the default) then
# they must run on their own; all imports must be explicit within
# a function's docstring. Once imported that import will be
# available to the rest of the tests in a given function's
# docstring (unless clear_globs=True below).
if not self._normal:
test.globs = {}
# if this is uncommented then all the test would get is what
# comes by default with a "from sympy import *"
#exec('from sympy import *') in test.globs
test.globs['print_function'] = print_function
try:
f, t = runner.run(test, compileflags=future_flags,
out=new.write, clear_globs=False)
except KeyboardInterrupt:
raise
finally:
sys.stdout = old
if f > 0:
self._reporter.doctest_fail(test.name, new.getvalue())
else:
self._reporter.test_pass()
self._reporter.leaving_filename()
def get_test_files(self, dir, pat='*.py', init_only=True):
"""
Returns the list of \*.py files (default) from which docstrings
will be tested which are at or below directory ``dir``. By default,
only those that have an __init__.py in their parent directory
and do not start with ``test_`` will be included.
"""
def importable(x):
"""
Checks if the given pathname x is an importable module by checking
for an __init__.py file.
Returns True/False.
Currently we only test if the __init__.py file exists in the
directory with the file "x" (in theory we should also test all the
parent dirs).
"""
init_py = os.path.join(os.path.dirname(x), "__init__.py")
return os.path.exists(init_py)
dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0])
g = []
for path, folders, files in os.walk(dir):
g.extend([os.path.join(path, f) for f in files
if not f.startswith('test_') and fnmatch(f, pat)])
if init_only:
# skip files that are not importable (i.e. missing __init__.py)
g = [x for x in g if importable(x)]
return [sys_normcase(gi) for gi in g]
def _process_dependencies(self, deps):
"""
Returns ``False`` if some dependencies are not met and the test should
be skipped; otherwise returns ``True``.
"""
executables = deps.get('exe', None)
moduledeps = deps.get('modules', None)
viewers = deps.get('disable_viewers', None)
pyglet = deps.get('pyglet', None)
# print deps
if executables is not None:
for ex in executables:
found = find_executable(ex)
if found is None:
return False
if moduledeps is not None:
for extmod in moduledeps:
if extmod == 'matplotlib':
matplotlib = import_module(
'matplotlib',
__import__kwargs={'fromlist':
['pyplot', 'cm', 'collections']},
min_module_version='1.0.0', catch=(RuntimeError,))
if matplotlib is not None:
pass
else:
return False
else:
# TODO min version support
mod = import_module(extmod)
if mod is not None:
version = "unknown"
if hasattr(mod, '__version__'):
version = mod.__version__
else:
return False
if viewers is not None:
import tempfile
tempdir = tempfile.mkdtemp()
os.environ['PATH'] = '%s:%s' % (tempdir, os.environ['PATH'])
if PY3:
vw = '#!/usr/bin/env python3\n' \
'import sys\n' \
'if len(sys.argv) <= 1:\n' \
' exit("wrong number of args")\n'
else:
vw = '#!/usr/bin/env python\n' \
'import sys\n' \
'if len(sys.argv) <= 1:\n' \
' exit("wrong number of args")\n'
for viewer in viewers:
with open(os.path.join(tempdir, viewer), 'w') as fh:
fh.write(vw)
# make the file executable
os.chmod(os.path.join(tempdir, viewer),
stat.S_IREAD | stat.S_IWRITE | stat.S_IXUSR)
if pyglet:
# monkey-patch pyglet s.t. it does not open a window during
# doctesting
import pyglet
class DummyWindow(object):
def __init__(self, *args, **kwargs):
self.has_exit=True
self.width = 600
self.height = 400
def set_vsync(self, x):
pass
def switch_to(self):
pass
def push_handlers(self, x):
pass
def close(self):
pass
pyglet.window.Window = DummyWindow
return True
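def _stub_viewer_sketch(viewer='evince'):
    # A minimal standalone sketch of the viewer-stubbing trick used in
    # _process_dependencies above; the viewer name is an assumed example
    # and the shell stub is POSIX-specific.
    import os
    import stat
    import tempfile
    tempdir = tempfile.mkdtemp()
    stub = os.path.join(tempdir, viewer)
    with open(stub, 'w') as fh:
        fh.write('#!/bin/sh\nexit 0\n')  # no-op script shadowing the viewer
    os.chmod(stub, stat.S_IREAD | stat.S_IWRITE | stat.S_IXUSR)
    # prepend so the stub wins the PATH lookup
    os.environ['PATH'] = tempdir + os.pathsep + os.environ['PATH']
    return tempdir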
class SymPyDocTestFinder(DocTestFinder):
"""
A class used to extract the DocTests that are relevant to a given
object, from its docstring and the docstrings of its contained
objects. Doctests can currently be extracted from the following
object types: modules, functions, classes, methods, staticmethods,
classmethods, and properties.
Modified from doctest's version by looking harder for code in the
case that it looks like the code comes from a different module.
In the case of decorated functions (e.g. @vectorize) they appear
to come from a different module (e.g. multidimensional) even though
their code is not there.
"""
def _find(self, tests, obj, name, module, source_lines, globs, seen):
"""
Find tests for the given object and any contained objects, and
add them to ``tests``.
"""
if self._verbose:
print('Finding tests in %s' % name)
# If we've already processed this object, then ignore it.
if id(obj) in seen:
return
seen[id(obj)] = 1
# Make sure we don't run doctests for classes outside of sympy, such
# as in numpy or scipy.
if inspect.isclass(obj):
if obj.__module__.split('.')[0] != 'sympy':
return
# Find a test for this object, and add it to the list of tests.
test = self._get_test(obj, name, module, globs, source_lines)
if test is not None:
tests.append(test)
if not self._recurse:
return
# Look for tests in a module's contained objects.
if inspect.ismodule(obj):
for rawname, val in obj.__dict__.items():
# Recurse to functions & classes.
if inspect.isfunction(val) or inspect.isclass(val):
# Make sure we don't run doctests for functions or classes
# from different modules
if val.__module__ != module.__name__:
continue
assert self._from_module(module, val), \
"%s is not in module %s (rawname %s)" % (val, module, rawname)
try:
valname = '%s.%s' % (name, rawname)
self._find(tests, val, valname, module,
source_lines, globs, seen)
except KeyboardInterrupt:
raise
# Look for tests in a module's __test__ dictionary.
for valname, val in getattr(obj, '__test__', {}).items():
if not isinstance(valname, string_types):
raise ValueError("SymPyDocTestFinder.find: __test__ keys "
"must be strings: %r" %
(type(valname),))
if not (inspect.isfunction(val) or inspect.isclass(val) or
inspect.ismethod(val) or inspect.ismodule(val) or
isinstance(val, string_types)):
raise ValueError("SymPyDocTestFinder.find: __test__ values "
"must be strings, functions, methods, "
"classes, or modules: %r" %
(type(val),))
valname = '%s.__test__.%s' % (name, valname)
self._find(tests, val, valname, module, source_lines,
globs, seen)
# Look for tests in a class's contained objects.
if inspect.isclass(obj):
for valname, val in obj.__dict__.items():
# Special handling for staticmethod/classmethod.
if isinstance(val, staticmethod):
val = getattr(obj, valname)
if isinstance(val, classmethod):
val = getattr(obj, valname).__func__
# Recurse to methods, properties, and nested classes.
if (inspect.isfunction(val) or
inspect.isclass(val) or
isinstance(val, property)):
# Make sure we don't run doctests for functions or classes
# from different modules
if isinstance(val, property):
if hasattr(val.fget, '__module__'):
if val.fget.__module__ != module.__name__:
continue
else:
if val.__module__ != module.__name__:
continue
assert self._from_module(module, val), \
"%s is not in module %s (valname %s)" % (
val, module, valname)
valname = '%s.%s' % (name, valname)
self._find(tests, val, valname, module, source_lines,
globs, seen)
def _get_test(self, obj, name, module, globs, source_lines):
"""
Return a DocTest for the given object, if it defines a docstring;
otherwise, return None.
"""
lineno = None
# Extract the object's docstring. If it doesn't have one,
# then return None (no test for this object).
if isinstance(obj, string_types):
# obj is a string in the case for objects in the polys package.
# Note that source_lines is a binary string (compiled polys
# modules), which can't be handled by _find_lineno so determine
# the line number here.
docstring = obj
matches = re.findall("line \d+", name)
assert len(matches) == 1, \
"string '%s' does not contain lineno " % name
# NOTE: this is not the exact line number, but it's better than
# no lineno ;)
lineno = int(matches[0][5:])
else:
try:
if obj.__doc__ is None:
docstring = ''
else:
docstring = obj.__doc__
if not isinstance(docstring, string_types):
docstring = str(docstring)
except (TypeError, AttributeError):
docstring = ''
# Don't bother if the docstring is empty.
if self._exclude_empty and not docstring:
return None
# check that properties have a docstring because _find_lineno
# assumes it
if isinstance(obj, property):
if obj.fget.__doc__ is None:
return None
# Find the docstring's location in the file.
if lineno is None:
# handling of properties is not implemented in _find_lineno so do
# it here
if hasattr(obj, 'func_closure') and obj.func_closure is not None:
tobj = obj.func_closure[0].cell_contents
elif isinstance(obj, property):
tobj = obj.fget
else:
tobj = obj
lineno = self._find_lineno(tobj, source_lines)
if lineno is None:
return None
# Return a DocTest for this object.
if module is None:
filename = None
else:
filename = getattr(module, '__file__', module.__name__)
if filename[-4:] in (".pyc", ".pyo"):
filename = filename[:-1]
if hasattr(obj, '_doctest_depends_on'):
globs['_doctest_depends_on'] = obj._doctest_depends_on
else:
globs['_doctest_depends_on'] = {}
return self._parser.get_doctest(docstring, globs, name,
filename, lineno)
class SymPyDocTestRunner(DocTestRunner):
"""
A class used to run DocTest test cases, and accumulate statistics.
The ``run`` method is used to process a single DocTest case. It
returns a tuple ``(f, t)``, where ``t`` is the number of test cases
tried, and ``f`` is the number of test cases that failed.
Modified from the doctest version to not reset the sys.displayhook (see
issue 5140).
See the docstring of the original DocTestRunner for more information.
"""
def run(self, test, compileflags=None, out=None, clear_globs=True):
"""
Run the examples in ``test``, and display the results using the
writer function ``out``.
The examples are run in the namespace ``test.globs``. If
``clear_globs`` is true (the default), then this namespace will
be cleared after the test runs, to help with garbage
collection. If you would like to examine the namespace after
the test completes, then use ``clear_globs=False``.
``compileflags`` gives the set of flags that should be used by
the Python compiler when running the examples. If not
specified, then it will default to the set of future-import
flags that apply to ``globs``.
The output of each example is checked using
``SymPyDocTestRunner.check_output``, and the results are
formatted by the ``SymPyDocTestRunner.report_*`` methods.
"""
self.test = test
if compileflags is None:
compileflags = pdoctest._extract_future_flags(test.globs)
save_stdout = sys.stdout
if out is None:
out = save_stdout.write
sys.stdout = self._fakeout
# Patch pdb.set_trace to restore sys.stdout during interactive
# debugging (so it's not still redirected to self._fakeout).
# Note that the interactive output will go to *our*
# save_stdout, even if that's not the real sys.stdout; this
# allows us to write test cases for the set_trace behavior.
save_set_trace = pdb.set_trace
self.debugger = pdoctest._OutputRedirectingPdb(save_stdout)
self.debugger.reset()
pdb.set_trace = self.debugger.set_trace
# Patch linecache.getlines, so we can see the example's source
# when we're inside the debugger.
self.save_linecache_getlines = pdoctest.linecache.getlines
linecache.getlines = self.__patched_linecache_getlines
try:
test.globs['print_function'] = print_function
return self.__run(test, compileflags, out)
finally:
sys.stdout = save_stdout
pdb.set_trace = save_set_trace
linecache.getlines = self.save_linecache_getlines
if clear_globs:
test.globs.clear()
# We have to override the name mangled methods.
SymPyDocTestRunner._SymPyDocTestRunner__patched_linecache_getlines = \
DocTestRunner._DocTestRunner__patched_linecache_getlines
SymPyDocTestRunner._SymPyDocTestRunner__run = DocTestRunner._DocTestRunner__run
SymPyDocTestRunner._SymPyDocTestRunner__record_outcome = \
DocTestRunner._DocTestRunner__record_outcome
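# The three assignments above work around Python's name mangling: a method
# named __run on DocTestRunner is stored as _DocTestRunner__run, so the
# subclass must alias it under its own mangled name. A toy illustration
# (class names assumed):
#
#     class Base(object):
#         def __run(self):            # stored as Base._Base__run
#             return 'ran'
#
#     class Child(Base):
#         def go(self):
#             return self.__run()     # compiles to self._Child__run()
#
#     Child._Child__run = Base._Base__run
#     Child().go()                    # 'ran'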
class SymPyOutputChecker(pdoctest.OutputChecker):
"""
Compared to the OutputChecker from the stdlib, our OutputChecker class
supports numerical comparison of floats occurring in the output of
doctest examples.
"""
def __init__(self):
# NOTE OutputChecker is an old-style class with no __init__ method,
# so we can't call the base class version of __init__ here
got_floats = r'(\d+\.\d*|\.\d+)'
# floats in the 'want' string may contain ellipses
want_floats = got_floats + r'(\.{3})?'
front_sep = r'\s|\+|\-|\*|,'
back_sep = front_sep + r'|j|e'
fbeg = r'^%s(?=%s|$)' % (got_floats, back_sep)
fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, got_floats, back_sep)
self.num_got_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend))
fbeg = r'^%s(?=%s|$)' % (want_floats, back_sep)
fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, want_floats, back_sep)
self.num_want_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend))
def check_output(self, want, got, optionflags):
"""
Return True iff the actual output from an example (`got`)
matches the expected output (`want`). These strings are
always considered to match if they are identical; but
depending on what option flags the test runner is using,
several non-exact match types are also possible. See the
documentation for `TestRunner` for more information about
option flags.
"""
# Handle the common case first, for efficiency:
# if they're string-identical, always return true.
if got == want:
return True
        # TODO: parse integers as well?
# Parse floats and compare them. If some of the parsed floats contain
# ellipses, skip the comparison.
matches = self.num_got_rgx.finditer(got)
numbers_got = [match.group(1) for match in matches] # list of strs
matches = self.num_want_rgx.finditer(want)
numbers_want = [match.group(1) for match in matches] # list of strs
if len(numbers_got) != len(numbers_want):
return False
if len(numbers_got) > 0:
nw_ = []
for ng, nw in zip(numbers_got, numbers_want):
if '...' in nw:
nw_.append(ng)
continue
else:
nw_.append(nw)
if abs(float(ng)-float(nw)) > 1e-5:
return False
got = self.num_got_rgx.sub(r'%s', got)
got = got % tuple(nw_)
# <BLANKLINE> can be used as a special sequence to signify a
# blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
if not (optionflags & pdoctest.DONT_ACCEPT_BLANKLINE):
# Replace <BLANKLINE> in want with a blank line.
want = re.sub('(?m)^%s\s*?$' % re.escape(pdoctest.BLANKLINE_MARKER),
'', want)
# If a line in got contains only spaces, then remove the
# spaces.
got = re.sub('(?m)^\s*?$', '', got)
if got == want:
return True
# This flag causes doctest to ignore any differences in the
# contents of whitespace strings. Note that this can be used
# in conjunction with the ELLIPSIS flag.
if optionflags & pdoctest.NORMALIZE_WHITESPACE:
got = ' '.join(got.split())
want = ' '.join(want.split())
if got == want:
return True
# The ELLIPSIS flag says to let the sequence "..." in `want`
# match any substring in `got`.
if optionflags & pdoctest.ELLIPSIS:
if pdoctest._ellipsis_match(want, got):
return True
# We didn't find any match; return false.
return False
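# Illustrative sketch of the numeric matching above (hedged; the literal
# values are made up, while the 1e-5 tolerance is the one hard-coded in
# check_output):
#
#     >>> checker = SymPyOutputChecker()
#     >>> checker.check_output('0.333333', '0.333332999999', 0)
#     True
#     >>> checker.check_output('0.4', '0.333333', 0)
#     False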
class Reporter(object):
"""
Parent class for all reporters.
"""
pass
class PyTestReporter(Reporter):
"""
    Py.test-like reporter. Should produce output identical to py.test.
"""
def __init__(self, verbose=False, tb="short", colors=True,
force_colors=False, split=None):
self._verbose = verbose
self._tb_style = tb
self._colors = colors
self._force_colors = force_colors
self._xfailed = 0
self._xpassed = []
self._failed = []
self._failed_doctest = []
self._passed = 0
self._skipped = 0
self._exceptions = []
self._terminal_width = None
self._default_width = 80
self._split = split
# this tracks the x-position of the cursor (useful for positioning
# things on the screen), without the need for any readline library:
self._write_pos = 0
self._line_wrap = False
def root_dir(self, dir):
self._root_dir = dir
@property
def terminal_width(self):
if self._terminal_width is not None:
return self._terminal_width
def findout_terminal_width():
if sys.platform == "win32":
# Windows support is based on:
#
# http://code.activestate.com/recipes/
# 440694-determine-size-of-console-window-on-windows/
from ctypes import windll, create_string_buffer
h = windll.kernel32.GetStdHandle(-12)
csbi = create_string_buffer(22)
res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
if res:
import struct
(_, _, _, _, _, left, _, right, _, _, _) = \
struct.unpack("hhhhHhhhhhh", csbi.raw)
return right - left
else:
return self._default_width
if hasattr(sys.stdout, 'isatty') and not sys.stdout.isatty():
return self._default_width # leave PIPEs alone
try:
process = subprocess.Popen(['stty', '-a'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = process.stdout.read()
if PY3:
stdout = stdout.decode("utf-8")
except (OSError, IOError):
pass
else:
# We support the following output formats from stty:
#
# 1) Linux -> columns 80
# 2) OS X -> 80 columns
# 3) Solaris -> columns = 80
re_linux = r"columns\s+(?P<columns>\d+);"
re_osx = r"(?P<columns>\d+)\s*columns;"
re_solaris = r"columns\s+=\s+(?P<columns>\d+);"
for regex in (re_linux, re_osx, re_solaris):
match = re.search(regex, stdout)
if match is not None:
columns = match.group('columns')
try:
width = int(columns)
                        except ValueError:
                            continue  # avoid referencing an unbound `width`
if width != 0:
return width
return self._default_width
width = findout_terminal_width()
self._terminal_width = width
return width
def write(self, text, color="", align="left", width=None,
force_colors=False):
"""
Prints a text on the screen.
It uses sys.stdout.write(), so no readline library is necessary.
Parameters
==========
color : choose from the colors below, "" means default color
align : "left"/"right", "left" is a normal print, "right" is aligned on
the right-hand side of the screen, filled with spaces if
necessary
width : the screen width
"""
color_templates = (
("Black", "0;30"),
("Red", "0;31"),
("Green", "0;32"),
("Brown", "0;33"),
("Blue", "0;34"),
("Purple", "0;35"),
("Cyan", "0;36"),
("LightGray", "0;37"),
("DarkGray", "1;30"),
("LightRed", "1;31"),
("LightGreen", "1;32"),
("Yellow", "1;33"),
("LightBlue", "1;34"),
("LightPurple", "1;35"),
("LightCyan", "1;36"),
("White", "1;37"),
)
colors = {}
for name, value in color_templates:
colors[name] = value
c_normal = '\033[0m'
c_color = '\033[%sm'
if width is None:
width = self.terminal_width
if align == "right":
if self._write_pos + len(text) > width:
# we don't fit on the current line, create a new line
self.write("\n")
self.write(" "*(width - self._write_pos - len(text)))
if not self._force_colors and hasattr(sys.stdout, 'isatty') and not \
sys.stdout.isatty():
# the stdout is not a terminal, this for example happens if the
# output is piped to less, e.g. "bin/test | less". In this case,
# the terminal control sequences would be printed verbatim, so
# don't use any colors.
color = ""
elif sys.platform == "win32":
# Windows consoles don't support ANSI escape sequences
color = ""
elif not self._colors:
color = ""
if self._line_wrap:
if text[0] != "\n":
sys.stdout.write("\n")
# Avoid UnicodeEncodeError when printing out test failures
if PY3 and IS_WINDOWS:
text = text.encode('raw_unicode_escape').decode('utf8', 'ignore')
elif PY3 and not sys.stdout.encoding.lower().startswith('utf'):
text = text.encode(sys.stdout.encoding, 'backslashreplace'
).decode(sys.stdout.encoding)
if color == "":
sys.stdout.write(text)
else:
sys.stdout.write("%s%s%s" %
(c_color % colors[color], text, c_normal))
sys.stdout.flush()
l = text.rfind("\n")
if l == -1:
self._write_pos += len(text)
else:
self._write_pos = len(text) - l - 1
self._line_wrap = self._write_pos >= width
self._write_pos %= width
def write_center(self, text, delim="="):
width = self.terminal_width
if text != "":
text = " %s " % text
idx = (width - len(text)) // 2
t = delim*idx + text + delim*(width - idx - len(text))
self.write(t + "\n")
def write_exception(self, e, val, tb):
t = traceback.extract_tb(tb)
# remove the first item, as that is always runtests.py
t = t[1:]
t = traceback.format_list(t)
self.write("".join(t))
t = traceback.format_exception_only(e, val)
self.write("".join(t))
def start(self, seed=None, msg="test process starts"):
self.write_center(msg)
executable = sys.executable
v = tuple(sys.version_info)
python_version = "%s.%s.%s-%s-%s" % v
implementation = platform.python_implementation()
if implementation == 'PyPy':
implementation += " %s.%s.%s-%s-%s" % sys.pypy_version_info
self.write("executable: %s (%s) [%s]\n" %
(executable, python_version, implementation))
from .misc import ARCH
self.write("architecture: %s\n" % ARCH)
from sympy.core.cache import USE_CACHE
self.write("cache: %s\n" % USE_CACHE)
from sympy.core.compatibility import GROUND_TYPES, HAS_GMPY
version = ''
if GROUND_TYPES =='gmpy':
if HAS_GMPY == 1:
import gmpy
elif HAS_GMPY == 2:
import gmpy2 as gmpy
version = gmpy.version()
self.write("ground types: %s %s\n" % (GROUND_TYPES, version))
if seed is not None:
self.write("random seed: %d\n" % seed)
from .misc import HASH_RANDOMIZATION
self.write("hash randomization: ")
hash_seed = os.getenv("PYTHONHASHSEED") or '0'
if HASH_RANDOMIZATION and (hash_seed == "random" or int(hash_seed)):
self.write("on (PYTHONHASHSEED=%s)\n" % hash_seed)
else:
self.write("off\n")
if self._split:
self.write("split: %s\n" % self._split)
self.write('\n')
self._t_start = clock()
def finish(self):
self._t_end = clock()
self.write("\n")
global text, linelen
text = "tests finished: %d passed, " % self._passed
linelen = len(text)
        def add_text(mytext):
            """Break new text if too long."""
            global text, linelen
if linelen + len(mytext) > self.terminal_width:
text += '\n'
linelen = 0
text += mytext
linelen += len(mytext)
if len(self._failed) > 0:
add_text("%d failed, " % len(self._failed))
if len(self._failed_doctest) > 0:
add_text("%d failed, " % len(self._failed_doctest))
if self._skipped > 0:
add_text("%d skipped, " % self._skipped)
if self._xfailed > 0:
add_text("%d expected to fail, " % self._xfailed)
if len(self._xpassed) > 0:
add_text("%d expected to fail but passed, " % len(self._xpassed))
if len(self._exceptions) > 0:
add_text("%d exceptions, " % len(self._exceptions))
add_text("in %.2f seconds" % (self._t_end - self._t_start))
if len(self._xpassed) > 0:
self.write_center("xpassed tests", "_")
for e in self._xpassed:
self.write("%s: %s\n" % (e[0], e[1]))
self.write("\n")
if self._tb_style != "no" and len(self._exceptions) > 0:
for e in self._exceptions:
filename, f, (t, val, tb) = e
self.write_center("", "_")
if f is None:
s = "%s" % filename
else:
s = "%s:%s" % (filename, f.__name__)
self.write_center(s, "_")
self.write_exception(t, val, tb)
self.write("\n")
if self._tb_style != "no" and len(self._failed) > 0:
for e in self._failed:
filename, f, (t, val, tb) = e
self.write_center("", "_")
self.write_center("%s:%s" % (filename, f.__name__), "_")
self.write_exception(t, val, tb)
self.write("\n")
if self._tb_style != "no" and len(self._failed_doctest) > 0:
for e in self._failed_doctest:
filename, msg = e
self.write_center("", "_")
self.write_center("%s" % filename, "_")
self.write(msg)
self.write("\n")
self.write_center(text)
ok = len(self._failed) == 0 and len(self._exceptions) == 0 and \
len(self._failed_doctest) == 0
if not ok:
self.write("DO *NOT* COMMIT!\n")
return ok
def entering_filename(self, filename, n):
rel_name = filename[len(self._root_dir) + 1:]
self._active_file = rel_name
self._active_file_error = False
self.write(rel_name)
self.write("[%d] " % n)
def leaving_filename(self):
self.write(" ")
if self._active_file_error:
self.write("[FAIL]", "Red", align="right")
else:
self.write("[OK]", "Green", align="right")
self.write("\n")
if self._verbose:
self.write("\n")
def entering_test(self, f):
self._active_f = f
if self._verbose:
self.write("\n" + f.__name__ + " ")
def test_xfail(self):
self._xfailed += 1
self.write("f", "Green")
def test_xpass(self, v):
message = str(v)
self._xpassed.append((self._active_file, message))
self.write("X", "Green")
def test_fail(self, exc_info):
self._failed.append((self._active_file, self._active_f, exc_info))
self.write("F", "Red")
self._active_file_error = True
def doctest_fail(self, name, error_msg):
# the first line contains "******", remove it:
error_msg = "\n".join(error_msg.split("\n")[1:])
self._failed_doctest.append((name, error_msg))
self.write("F", "Red")
self._active_file_error = True
def test_pass(self, char="."):
self._passed += 1
if self._verbose:
self.write("ok", "Green")
else:
self.write(char, "Green")
def test_skip(self, v=None):
char = "s"
self._skipped += 1
if v is not None:
message = str(v)
if message == "KeyboardInterrupt":
char = "K"
elif message == "Timeout":
char = "T"
elif message == "Slow":
char = "w"
self.write(char, "Blue")
if self._verbose:
self.write(" - ", "Blue")
if v is not None:
self.write(message, "Blue")
def test_exception(self, exc_info):
self._exceptions.append((self._active_file, self._active_f, exc_info))
self.write("E", "Red")
self._active_file_error = True
def import_error(self, filename, exc_info):
self._exceptions.append((filename, None, exc_info))
rel_name = filename[len(self._root_dir) + 1:]
self.write(rel_name)
self.write("[?] Failed to import", "Red")
self.write(" ")
self.write("[FAIL]", "Red", align="right")
self.write("\n")
sympy_dir = get_sympy_dir()
| bsd-3-clause |
40223137/2015abc | static/Brython3.1.0-20150301-090019/Lib/xml/etree/ElementPath.py | 785 | 9477 | #
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
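# This module backs the ``find``/``findall``/``iterfind``/``findtext``
# methods of ElementTree elements.  A brief illustrative example (the
# element names are made up):
#
#     >>> from xml.etree import ElementTree
#     >>> root = ElementTree.fromstring('<a><b><c/></b><b/></a>')
#     >>> len(root.findall('b'))
#     2
#     >>> root.find('.//c').tag
#     'c'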
import re
xpath_tokenizer_re = re.compile(
"("
"'[^']*'|\"[^\"]*\"|"
"::|"
"//?|"
"\.\.|"
"\(\)|"
"[/.*:\[\]\(\)@=])|"
"((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
"\s+"
)
def xpath_tokenizer(pattern, namespaces=None):
for token in xpath_tokenizer_re.findall(pattern):
tag = token[1]
if tag and tag[0] != "{" and ":" in tag:
try:
prefix, uri = tag.split(":", 1)
if not namespaces:
raise KeyError
yield token[0], "{%s}%s" % (namespaces[prefix], uri)
except KeyError:
raise SyntaxError("prefix %r not found in prefix map" % prefix)
else:
yield token
def get_parent_map(context):
parent_map = context.parent_map
if parent_map is None:
context.parent_map = parent_map = {}
for p in context.root.iter():
for e in p:
parent_map[e] = p
return parent_map
def prepare_child(next, token):
tag = token[1]
def select(context, result):
for elem in result:
for e in elem:
if e.tag == tag:
yield e
return select
def prepare_star(next, token):
def select(context, result):
for elem in result:
for e in elem:
yield e
return select
def prepare_self(next, token):
def select(context, result):
for elem in result:
yield elem
return select
def prepare_descendant(next, token):
token = next()
if token[0] == "*":
tag = "*"
elif not token[0]:
tag = token[1]
else:
raise SyntaxError("invalid descendant")
def select(context, result):
for elem in result:
for e in elem.iter(tag):
if e is not elem:
yield e
return select
def prepare_parent(next, token):
def select(context, result):
# FIXME: raise error if .. is applied at toplevel?
parent_map = get_parent_map(context)
result_map = {}
for elem in result:
if elem in parent_map:
parent = parent_map[elem]
if parent not in result_map:
result_map[parent] = None
yield parent
return select
def prepare_predicate(next, token):
# FIXME: replace with real parser!!! refs:
# http://effbot.org/zone/simple-iterator-parser.htm
# http://javascript.crockford.com/tdop/tdop.html
signature = []
predicate = []
while 1:
token = next()
if token[0] == "]":
break
if token[0] and token[0][:1] in "'\"":
token = "'", token[0][1:-1]
signature.append(token[0] or "-")
predicate.append(token[1])
signature = "".join(signature)
# use signature to determine predicate type
if signature == "@-":
# [@attribute] predicate
key = predicate[1]
def select(context, result):
for elem in result:
if elem.get(key) is not None:
yield elem
return select
if signature == "@-='":
# [@attribute='value']
key = predicate[1]
value = predicate[-1]
def select(context, result):
for elem in result:
if elem.get(key) == value:
yield elem
return select
if signature == "-" and not re.match("\d+$", predicate[0]):
# [tag]
tag = predicate[0]
def select(context, result):
for elem in result:
if elem.find(tag) is not None:
yield elem
return select
if signature == "-='" and not re.match("\d+$", predicate[0]):
# [tag='value']
tag = predicate[0]
value = predicate[-1]
def select(context, result):
for elem in result:
for e in elem.findall(tag):
if "".join(e.itertext()) == value:
yield elem
break
return select
if signature == "-" or signature == "-()" or signature == "-()-":
# [index] or [last()] or [last()-index]
if signature == "-":
index = int(predicate[0]) - 1
else:
if predicate[0] != "last":
raise SyntaxError("unsupported function")
if signature == "-()-":
try:
index = int(predicate[2]) - 1
except ValueError:
raise SyntaxError("unsupported expression")
else:
index = -1
def select(context, result):
parent_map = get_parent_map(context)
for elem in result:
try:
parent = parent_map[elem]
# FIXME: what if the selector is "*" ?
elems = list(parent.findall(elem.tag))
if elems[index] is elem:
yield elem
except (IndexError, KeyError):
pass
return select
raise SyntaxError("invalid predicate")
ops = {
"": prepare_child,
"*": prepare_star,
".": prepare_self,
"..": prepare_parent,
"//": prepare_descendant,
"[": prepare_predicate,
}
_cache = {}
class _SelectorContext:
parent_map = None
def __init__(self, root):
self.root = root
# --------------------------------------------------------------------
##
# Generate all matching objects.
def iterfind(elem, path, namespaces=None):
# compile selector pattern
if path[-1:] == "/":
path = path + "*" # implicit all (FIXME: keep this?)
try:
selector = _cache[path]
except KeyError:
if len(_cache) > 100:
_cache.clear()
if path[:1] == "/":
raise SyntaxError("cannot use absolute path on element")
next = iter(xpath_tokenizer(path, namespaces)).__next__
token = next()
selector = []
while 1:
try:
selector.append(ops[token[0]](next, token))
except StopIteration:
raise SyntaxError("invalid path")
try:
token = next()
if token[0] == "/":
token = next()
except StopIteration:
break
_cache[path] = selector
# execute selector pattern
result = [elem]
context = _SelectorContext(elem)
for select in selector:
result = select(context, result)
return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
try:
return next(iterfind(elem, path, namespaces))
except StopIteration:
return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
return list(iterfind(elem, path, namespaces))
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
try:
elem = next(iterfind(elem, path, namespaces))
return elem.text or ""
except StopIteration:
return default
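# Hedged sketch of the predicate support implemented above (the element and
# attribute names are made up):
#
#     >>> import xml.etree.ElementTree as ET
#     >>> root = ET.fromstring('<r><e k="v">x</e><e/></r>')
#     >>> findtext(root, "e[@k='v']")
#     'x'
#     >>> find(root, 'e[2]') is root[1]
#     True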
| gpl-3.0 |
felipecorrea/python-pocket | examples/save_to_pocket.py | 1 | 1208 | #!/usr/bin/env python
'''Add an Item to Pocket'''
__author__ = 'Felipe Borges'
import sys
sys.path.append("..")
import getopt
import pocket
USAGE = '''Usage: save_to_pocket [options] url
This script adds an Item to Pocket.
Options:
-h --help: print this help
--consumer_key : the Pocket API consumer key
--access_token : the user's Pocket Access Token
'''
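# Example invocation (the key and token values are placeholders):
#   python save_to_pocket.py --consumer_key=XXXX --access_token=YYYY \
#       http://example.com/article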
def print_usage_and_exit():
print USAGE
sys.exit(2)
def main():
try:
shortflags = 'h'
longflags = ['help', 'consumer_key=', 'access_token=']
opts, args = getopt.gnu_getopt(sys.argv[1:], shortflags, longflags)
except getopt.GetoptError:
print_usage_and_exit()
consumer_key = None
access_token = None
for o, a in opts:
if o in ('-h', '--help'):
print_usage_and_exit()
if o in ('--consumer_key'):
consumer_key = a
if o in ('--access_token'):
access_token = a
url = ' '.join(args)
if not url or not consumer_key or not access_token:
print_usage_and_exit()
api = pocket.Api(consumer_key = consumer_key, access_token = access_token)
try:
item = api.add(url)
    print 'Item \'%s\' added successfully!' % item.normal_url
  except Exception, e:
print e
sys.exit(2)
if __name__ == "__main__":
main()
| apache-2.0 |
eneldoserrata/marcos_openerp | addons/report_aeroo/barcode/barcode.py | 19 | 2368 | ##############################################################################
#
# Copyright (c) 2008-2011 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <[email protected]>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from code128 import get_code
from code39 import create_c39
from EANBarCode import EanBarCode
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
def make_barcode(code, code_type='ean13', rotate=None, height=50, xw=1):
if code:
if code_type.lower()=='ean13':
bar=EanBarCode()
im = bar.getImage(code,height)
elif code_type.lower()=='code128':
im = get_code(code, xw, height)
elif code_type.lower()=='code39':
im = create_c39(height, xw, code)
else:
            return StringIO(), 'image/png', '0in', '0in'  # keep return arity consistent
tf = StringIO()
try:
if rotate!=None:
im=im.rotate(int(rotate))
except Exception, e:
pass
im.save(tf, 'png')
size_x = str(im.size[0]/96.0)+'in'
size_y = str(im.size[1]/96.0)+'in'
return tf, 'image/png', size_x, size_y
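# Hedged usage sketch (the EAN value is illustrative):
#
#     data, mime, size_x, size_y = make_barcode('5901234123457',
#                                               code_type='ean13', height=50)
#     # ``data`` is a StringIO of PNG bytes; ``size_x``/``size_y`` are
#     # strings like '1.04in' suitable for report layout attributes.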
| agpl-3.0 |
lnfamous/Kernel_CyanogenMod11_Pico | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display the processing of packets and the time spent at each step.
# It helps us investigate networking or network device behavior.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
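# Example (hypothetical device name; the exact perf invocation may vary by
# perf version):
#   perf script -s netdev-times.py tx rx dev=eth0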
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes time of NET_RX softirq-entry
		 # and a list which stacks receive events
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval (msec) from src (nsec) to dst (nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
	all_event_list.sort(lambda a, b: cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
| gpl-2.0 |
DanteOnline/free-art | venv/lib/python3.4/site-packages/django/db/backends/oracle/creation.py | 160 | 17256 | import sys
import time
from django.conf import settings
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.utils import DatabaseError
from django.utils.functional import cached_property
from django.utils.six.moves import input
TEST_DATABASE_PREFIX = 'test_'
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
@cached_property
def _maindb_connection(self):
"""
This is analogous to other backends' `_nodb_connection` property,
which allows access to an "administrative" connection which can
be used to manage the test databases.
For Oracle, the only connection that can be used for that purpose
is the main (non-test) connection.
"""
settings_dict = settings.DATABASES[self.connection.alias]
user = settings_dict.get('SAVED_USER') or settings_dict['USER']
password = settings_dict.get('SAVED_PASSWORD') or settings_dict['PASSWORD']
settings_dict = settings_dict.copy()
settings_dict.update(USER=user, PASSWORD=password)
DatabaseWrapper = type(self.connection)
return DatabaseWrapper(settings_dict, alias=self.connection.alias)
def _create_test_db(self, verbosity=1, autoclobber=False, keepdb=False):
parameters = self._get_test_db_params()
cursor = self._maindb_connection.cursor()
if self._test_database_create():
try:
self._execute_test_db_creation(cursor, parameters, verbosity, keepdb)
except Exception as e:
# if we want to keep the db, then no need to do any of the below,
# just return and skip it all.
if keepdb:
return
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = input(
"It appears the test database, %s, already exists. "
"Type 'yes' to delete it, or 'no' to cancel: " % parameters['user'])
if autoclobber or confirm == 'yes':
if verbosity >= 1:
print("Destroying old test database for alias '%s'..." % self.connection.alias)
try:
self._execute_test_db_destruction(cursor, parameters, verbosity)
except DatabaseError as e:
if 'ORA-29857' in str(e):
self._handle_objects_preventing_db_destruction(cursor, parameters,
verbosity, autoclobber)
else:
# Ran into a database error that isn't about leftover objects in the tablespace
sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
sys.exit(2)
except Exception as e:
sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
sys.exit(2)
try:
self._execute_test_db_creation(cursor, parameters, verbosity, keepdb)
except Exception as e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print("Tests cancelled.")
sys.exit(1)
if self._test_user_create():
if verbosity >= 1:
print("Creating test user...")
try:
self._create_test_user(cursor, parameters, verbosity, keepdb)
except Exception as e:
# If we want to keep the db, then we want to also keep the user.
if keepdb:
return
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = input(
"It appears the test user, %s, already exists. Type "
"'yes' to delete it, or 'no' to cancel: " % parameters['user'])
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print("Destroying old test user...")
self._destroy_test_user(cursor, parameters, verbosity)
if verbosity >= 1:
print("Creating test user...")
self._create_test_user(cursor, parameters, verbosity, keepdb)
except Exception as e:
sys.stderr.write("Got an error recreating the test user: %s\n" % e)
sys.exit(2)
else:
print("Tests cancelled.")
sys.exit(1)
self._maindb_connection.close() # done with main user -- test user and tablespaces created
self._switch_to_test_user(parameters)
return self.connection.settings_dict['NAME']
def _switch_to_test_user(self, parameters):
"""
Oracle doesn't have the concept of separate databases under the same user.
Thus, we use a separate user (see _create_test_db). This method is used
to switch to that user. We will need the main user again for clean-up when
we end testing, so we keep its credentials in SAVED_USER/SAVED_PASSWORD
entries in the settings dict.
"""
real_settings = settings.DATABASES[self.connection.alias]
real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = \
self.connection.settings_dict['USER']
real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = \
self.connection.settings_dict['PASSWORD']
real_test_settings = real_settings['TEST']
test_settings = self.connection.settings_dict['TEST']
real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = \
self.connection.settings_dict['USER'] = parameters['user']
real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = parameters['password']
def set_as_test_mirror(self, primary_settings_dict):
"""
Set this database up to be used in testing as a mirror of a primary database
whose settings are given
"""
self.connection.settings_dict['USER'] = primary_settings_dict['USER']
self.connection.settings_dict['PASSWORD'] = primary_settings_dict['PASSWORD']
def _handle_objects_preventing_db_destruction(self, cursor, parameters, verbosity, autoclobber):
# There are objects in the test tablespace which prevent dropping it
# The easy fix is to drop the test user -- but are we allowed to do so?
print("There are objects in the old test database which prevent its destruction.")
print("If they belong to the test user, deleting the user will allow the test "
"database to be recreated.")
print("Otherwise, you will need to find and remove each of these objects, "
"or use a different tablespace.\n")
if self._test_user_create():
if not autoclobber:
confirm = input("Type 'yes' to delete user %s: " % parameters['user'])
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print("Destroying old test user...")
self._destroy_test_user(cursor, parameters, verbosity)
except Exception as e:
sys.stderr.write("Got an error destroying the test user: %s\n" % e)
sys.exit(2)
try:
if verbosity >= 1:
print("Destroying old test database for alias '%s'..." % self.connection.alias)
self._execute_test_db_destruction(cursor, parameters, verbosity)
except Exception as e:
sys.stderr.write("Got an error destroying the test database: %s\n" % e)
sys.exit(2)
else:
print("Tests cancelled -- test database cannot be recreated.")
sys.exit(1)
else:
print("Django is configured to use pre-existing test user '%s',"
" and will not attempt to delete it.\n" % parameters['user'])
print("Tests cancelled -- test database cannot be recreated.")
sys.exit(1)
def _destroy_test_db(self, test_database_name, verbosity=1):
"""
Destroy a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
self.connection.close()
parameters = self._get_test_db_params()
cursor = self._maindb_connection.cursor()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
if self._test_user_create():
if verbosity >= 1:
print('Destroying test user...')
self._destroy_test_user(cursor, parameters, verbosity)
if self._test_database_create():
if verbosity >= 1:
print('Destroying test database tables...')
self._execute_test_db_destruction(cursor, parameters, verbosity)
self._maindb_connection.close()
def _execute_test_db_creation(self, cursor, parameters, verbosity, keepdb=False):
if verbosity >= 2:
print("_create_test_db(): dbname = %s" % parameters['user'])
statements = [
"""CREATE TABLESPACE %(tblspace)s
DATAFILE '%(datafile)s' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE %(maxsize)s
""",
"""CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
TEMPFILE '%(datafile_tmp)s' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE %(maxsize_tmp)s
""",
]
# Ignore "tablespace already exists" error when keepdb is on.
acceptable_ora_err = 'ORA-01543' if keepdb else None
self._execute_allow_fail_statements(cursor, statements, parameters, verbosity, acceptable_ora_err)
def _create_test_user(self, cursor, parameters, verbosity, keepdb=False):
if verbosity >= 2:
print("_create_test_user(): username = %s" % parameters['user'])
statements = [
"""CREATE USER %(user)s
IDENTIFIED BY %(password)s
DEFAULT TABLESPACE %(tblspace)s
TEMPORARY TABLESPACE %(tblspace_temp)s
QUOTA UNLIMITED ON %(tblspace)s
""",
"""GRANT CREATE SESSION,
CREATE TABLE,
CREATE SEQUENCE,
CREATE PROCEDURE,
CREATE TRIGGER
TO %(user)s""",
]
# Ignore "user already exists" error when keepdb is on
acceptable_ora_err = 'ORA-01920' if keepdb else None
self._execute_allow_fail_statements(cursor, statements, parameters, verbosity, acceptable_ora_err)
# Most test-suites can be run without the create-view privilege. But some need it.
extra = "GRANT CREATE VIEW TO %(user)s"
success = self._execute_allow_fail_statements(cursor, [extra], parameters, verbosity, 'ORA-01031')
if not success and verbosity >= 2:
print("Failed to grant CREATE VIEW permission to test user. This may be ok.")
def _execute_test_db_destruction(self, cursor, parameters, verbosity):
if verbosity >= 2:
print("_execute_test_db_destruction(): dbname=%s" % parameters['user'])
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print("_destroy_test_user(): user=%s" % parameters['user'])
print("Be patient. This can take some time...")
statements = [
'DROP USER %(user)s CASCADE',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(self, cursor, statements, parameters, verbosity, allow_quiet_fail=False):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print(stmt)
try:
cursor.execute(stmt)
except Exception as err:
if (not allow_quiet_fail) or verbosity >= 2:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _execute_allow_fail_statements(self, cursor, statements, parameters, verbosity, acceptable_ora_err):
"""
Execute statements which are allowed to fail silently if the Oracle
error code given by `acceptable_ora_err` is raised. Return True if the
statements execute without an exception, or False otherwise.
"""
try:
# Statement can fail when acceptable_ora_err is not None
allow_quiet_fail = acceptable_ora_err is not None and len(acceptable_ora_err) > 0
self._execute_statements(cursor, statements, parameters, verbosity, allow_quiet_fail=allow_quiet_fail)
return True
except DatabaseError as err:
description = str(err)
if acceptable_ora_err is None or acceptable_ora_err not in description:
raise
return False
def _get_test_db_params(self):
return {
'dbname': self._test_database_name(),
'user': self._test_database_user(),
'password': self._test_database_passwd(),
'tblspace': self._test_database_tblspace(),
'tblspace_temp': self._test_database_tblspace_tmp(),
'datafile': self._test_database_tblspace_datafile(),
'datafile_tmp': self._test_database_tblspace_tmp_datafile(),
'maxsize': self._test_database_tblspace_size(),
'maxsize_tmp': self._test_database_tblspace_tmp_size(),
}
def _test_settings_get(self, key, default=None, prefixed=None):
"""
Return a value from the test settings dict,
or a given default,
or a prefixed entry from the main settings dict
"""
settings_dict = self.connection.settings_dict
val = settings_dict['TEST'].get(key, default)
if val is None:
val = TEST_DATABASE_PREFIX + settings_dict[prefixed]
return val
def _test_database_name(self):
return self._test_settings_get('NAME', prefixed='NAME')
def _test_database_create(self):
return self._test_settings_get('CREATE_DB', default=True)
def _test_user_create(self):
return self._test_settings_get('CREATE_USER', default=True)
def _test_database_user(self):
return self._test_settings_get('USER', prefixed='USER')
def _test_database_passwd(self):
return self._test_settings_get('PASSWORD', default=PASSWORD)
def _test_database_tblspace(self):
return self._test_settings_get('TBLSPACE', prefixed='USER')
def _test_database_tblspace_tmp(self):
settings_dict = self.connection.settings_dict
return settings_dict['TEST'].get('TBLSPACE_TMP',
TEST_DATABASE_PREFIX + settings_dict['USER'] + '_temp')
def _test_database_tblspace_datafile(self):
tblspace = '%s.dbf' % self._test_database_tblspace()
return self._test_settings_get('DATAFILE', default=tblspace)
def _test_database_tblspace_tmp_datafile(self):
tblspace = '%s.dbf' % self._test_database_tblspace_tmp()
return self._test_settings_get('DATAFILE_TMP', default=tblspace)
def _test_database_tblspace_size(self):
return self._test_settings_get('DATAFILE_MAXSIZE', default='500M')
def _test_database_tblspace_tmp_size(self):
return self._test_settings_get('DATAFILE_TMP_MAXSIZE', default='500M')
def _get_test_db_name(self):
"""
We need to return the 'production' DB name to get the test DB creation
machinery to work. This isn't a great deal in this case because DB
names as handled by Django haven't real counterparts in Oracle.
"""
return self.connection.settings_dict['NAME']
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME'],
self._test_database_user(),
)
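# A hedged example of the DATABASES['TEST'] keys consumed by the
# _test_settings_get() helpers above (all names and sizes are illustrative):
#
#     DATABASES = {
#         'default': {
#             'ENGINE': 'django.db.backends.oracle',
#             'NAME': 'xe', 'USER': 'scott', 'PASSWORD': 'tiger',
#             'TEST': {
#                 'USER': 'test_scott',
#                 'TBLSPACE': 'test_tbls',
#                 'TBLSPACE_TMP': 'test_tbls_temp',
#                 'DATAFILE_MAXSIZE': '200M',
#             },
#         },
#     }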
| gpl-3.0 |
DavidLi2010/ramcloud | bindings/python/stresstest_bank.py | 19 | 8020 | #!/usr/bin/env python
# Copyright (c) 2009-2010 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Repeatedly execute bank transfers.
WARNING: This file does not create a new client instance per worker process.
Your client library needs a mutex in shared memory around the networking code.
See RAM-39.
This is a stress test for RAMCloud.
Run this program with --help for usage."""
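# Example run (flag values are illustrative): 4 worker processes, 16 account
# objects, at most 4 accounts per transaction, a crash within every 1000
# RAMCloud operations:
#   ./stresstest_bank.py -p 4 -o 16 -m 4 -c 1000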
import os
import sys
import random
import time
from optparse import OptionParser
import multiprocessing
from retries import ImmediateRetry as RetryStrategy
import ramcloud
import txramcloud
from testutil import BreakException
txramcloud.RetryStrategy = RetryStrategy
class Stats(object):
INCREMENTS = 0
ABORTS = 1
CRASHES = 2
NUM = 3
LABELS = ['increments', 'aborts', 'crashes']
@classmethod
def to_str(cls, stats):
pairs = ["'%s': %d" % (l, v) for (l, v) in zip(cls.LABELS, stats)]
return '{%s}' % ', '.join(pairs)
class CountdownHook(object):
def __init__(self, count):
self.count = count
def __call__(self):
if self.count == 0:
raise BreakException
else:
self.count -= 1
class Test(object):
def __init__(self, txrc, table, oids, stats, die, options, args):
self.txrc = txrc
self.table = table
self.oids = oids
self.global_stats = stats
self.die = die
self.options = options
self.args = args
def __call__(self):
# Called by the child in its address space
# self.global_stats, self.die are in shared memory
# detach the child from the parent's TTY
os.setsid()
if self.options.crash:
when = random.randint(0, self.options.crash)
self.txrc.hook = CountdownHook(when)
self.local_stats = [0] * Stats.NUM
self.cache = {}
for oid in self.oids:
self.cache[oid] = None
i = 1
try:
while True:
if i % 10**6 == 0:
print "PID %d: continuing after %s" % (os.getpid(),
Stats.to_str(self.local_stats))
accts = self.choose_accts()
for retry in RetryStrategy():
                    if self.die.value:
print "PID %d: done after %s" % (os.getpid(),
Stats.to_str(self.local_stats))
return
try:
for oid in accts:
if self.cache[oid] is None:
blob, version = self.txrc.read(self.table, oid)
value = int(blob)
self.cache[oid] = (value, version)
if not self.algo(accts):
retry.later()
except BreakException:
print "PID %d: crash after %s" % (os.getpid(),
Stats.to_str(self.local_stats))
self.local_stats[Stats.CRASHES] += 1
for oid in self.cache:
self.cache[oid] = None
when = random.randint(0, self.options.crash)
self.txrc.hook = CountdownHook(when)
# and keep going
i += 1
finally:
# update global stats
for i, v in enumerate(self.local_stats):
self.global_stats[i] += v
def choose_accts(self):
assert len(self.oids) >= 2
max_num_accts = len(self.oids)
if (self.options.max_num_accts_per_tx and
self.options.max_num_accts_per_tx < max_num_accts):
max_num_accts = self.options.max_num_accts_per_tx
num_accts = random.randint(2, max_num_accts)
        accts = list(self.oids)
random.shuffle(accts)
accts = accts[:num_accts]
return accts
def algo(self, accts):
mt = txramcloud.MiniTransaction()
new_values = {}
for oid in accts:
value, version = self.cache[oid]
rr = ramcloud.RejectRules.exactly(version)
if oid == accts[0]:
value -= len(accts[1:])
else:
value += 1
new_values[oid] = value
mt[(self.table, oid)] = txramcloud.MTWrite(str(value), rr)
try:
result = self.txrc.mt_commit(mt)
except txramcloud.TxRAMCloud.TransactionRejected, e:
for ((table, oid), reason) in e.reasons.items():
self.cache[oid] = None
self.local_stats[Stats.ABORTS] += 1
return False
except txramcloud.TxRAMCloud.TransactionExpired, e:
self.local_stats[Stats.ABORTS] += 1
return False
else:
for ((table, oid), version) in result.items():
self.cache[oid] = (new_values[oid], version)
self.local_stats[Stats.INCREMENTS] += 1
return True
if __name__ == '__main__':
parser = OptionParser()
parser.set_description(__doc__.split('\n\n', 1)[0])
parser.add_option("-p", "--num-processes",
dest="num_procs", type="int", default=1,
help="spawn NUM processes, defaults to 1",
metavar="NUM")
parser.add_option("-o", "--num-objects",
dest="num_objects", type="int", default=2,
help=("increment across NUM objects, defaults to 2"),
metavar="NUM")
parser.add_option("-m", "--max-tx",
dest="max_num_accts_per_tx", type="int", default=0,
help=("the maximum NUM of accounts to involve in a " +
"single transaction, defaults to infinity"),
metavar="NUM")
parser.add_option("-c", "--crash",
dest="crash", type="int", default=0,
help=("crash randomly by the NUM-th RAMCloud "
"operation, defaults to not crashing"),
metavar="NUM")
(options, args) = parser.parse_args()
assert not args
r = txramcloud.TxRAMCloud(7)
r.connect()
r.create_table("test")
table = r.get_table_id("test")
oids = range(options.num_objects)
for oid in oids:
r.create(table, oid, str(0))
stats = multiprocessing.Array('i', Stats.NUM)
die = multiprocessing.Value('i', 0, lock=False)
target = Test(r, table, oids, stats, die, options, args)
procs = []
for i in range(options.num_procs):
procs.append(multiprocessing.Process(target=target))
start = time.time()
for p in procs:
p.start()
try:
for p in procs:
p.join()
except KeyboardInterrupt:
# a process can be joined multiple times
die.value = 1
for p in procs:
p.join()
end = time.time()
print "wall time: %0.02fs" % (end - start)
print "stats:", Stats.to_str(stats[:])
sum = 0
for oid in oids:
blob, version = r.read(table, oid)
value = int(blob)
sum += value
print 'oid %d: value=%d, version=%d' % (oid, value, version)
print 'sum: %d' % sum
assert sum == 0
| isc |
fengbaicanhe/intellij-community | python/helpers/profiler/thrift/TSerialization.py | 36 | 1401 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
def serialize(thrift_object,
protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
transport = TTransport.TMemoryBuffer()
protocol = protocol_factory.getProtocol(transport)
thrift_object.write(protocol)
return transport.getvalue()
def deserialize(base,
buf,
protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
transport = TTransport.TMemoryBuffer(buf)
protocol = protocol_factory.getProtocol(transport)
base.read(protocol)
return base
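# Usage sketch (illustrative; ``MyStruct`` stands in for any generated
# Thrift struct type):
#
#   buf = serialize(MyStruct(field=1))  # -> bytes, TBinaryProtocol by default
#   obj = deserialize(MyStruct(), buf)  # reads the bytes back into a struct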
| apache-2.0 |
Tesora-Release/tesora-trove | trove/guestagent/module/drivers/new_relic_license_driver.py | 1 | 3407 | # Copyright 2016 Tesora, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from datetime import date
from oslo_log import log as logging
from trove.common import cfg
from trove.common.i18n import _
from trove.common import stream_codecs
from trove.common import utils
from trove.guestagent.common import operating_system
from trove.guestagent.module.drivers import module_driver
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
NR_ADD_LICENSE_CMD = ['nrsysmond-config', '--set', 'license_key=%s']
NR_SRV_CONTROL_CMD = ['/etc/init.d/newrelic-sysmond']
class NewRelicLicenseDriver(module_driver.ModuleDriver):
"""Module to set up the license for the NewRelic service."""
def get_description(self):
return "New Relic License Module Driver"
def get_updated(self):
return date(2016, 4, 12)
@module_driver.output(
log_message=_('Installing New Relic license key'),
success_message=_('New Relic license key installed'),
fail_message=_('New Relic license key not installed'))
def apply(self, name, datastore, ds_version, data_file, admin_module):
license_key = None
data = operating_system.read_file(
data_file, codec=stream_codecs.KeyValueCodec())
for key, value in data.items():
if 'license_key' == key.lower():
license_key = value
break
if license_key:
self._add_license_key(license_key)
self._server_control('start')
else:
return False, "'license_key' not found in contents file"
def _add_license_key(self, license_key):
try:
exec_args = {'timeout': 10,
'run_as_root': True,
'root_helper': 'sudo'}
cmd = list(NR_ADD_LICENSE_CMD)
cmd[-1] = cmd[-1] % license_key
utils.execute_with_timeout(*cmd, **exec_args)
except Exception:
LOG.exception(_("Could not install license key '%s'") %
license_key)
raise
def _server_control(self, command):
try:
exec_args = {'timeout': 10,
'run_as_root': True,
'root_helper': 'sudo'}
cmd = list(NR_SRV_CONTROL_CMD)
cmd.append(command)
utils.execute_with_timeout(*cmd, **exec_args)
except Exception:
LOG.exception(_("Could not %s New Relic server") % command)
raise
@module_driver.output(
log_message=_('Removing New Relic license key'),
success_message=_('New Relic license key removed'),
fail_message=_('New Relic license key not removed'))
def remove(self, name, datastore, ds_version, data_file):
self._add_license_key("bad_key")
self._server_control('stop')
| apache-2.0 |
RomanHargrave/namebench | libnamebench/provider_extensions.py | 174 | 1713 | # Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tricks that depend on a certain DNS provider.
Tricks that require inheritance by nameserver.py must go here; otherwise,
see providers.py for externally available functions.
"""
__author__ = '[email protected] (Thomas Stromberg)'
class NameServerProvider(object):
"""Inherited by nameserver."""
# myresolver.info
def GetMyResolverIpWithDuration(self):
return self.GetIpFromNameWithDuration('self.myresolver.info.')
def GetMyResolverHostNameWithDuration(self):
return self.GetNameFromNameWithDuration('self.myresolver.info.')
# OpenDNS
def GetOpenDnsNodeWithDuration(self):
return self.GetTxtRecordWithDuration('which.opendns.com.')[0:2]
def GetOpenDnsInterceptionStateWithDuration(self):
"""Check if our packets are actually getting to the correct servers."""
(node_id, duration) = self.GetOpenDnsNodeWithDuration()
if node_id and 'I am not an OpenDNS resolver' in node_id:
return (True, duration)
return (False, duration)
# UltraDNS
def GetUltraDnsNodeWithDuration(self):
return self.GetNameFromNameWithDuration('whoareyou.ultradns.net.')
| apache-2.0 |
DrMattChristian/btrbk | contrib/crypt/kdf_pbkdf2.py | 2 | 1919 | #!/usr/bin/env python3
#
# kdf_pbkdf2.py - (kdf_backend for btrbk)
#
# Copyright (c) 2017 Axel Burri
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------
# The official btrbk website is located at:
# https://digint.ch/btrbk/
#
# Author:
# Axel Burri <[email protected]>
# ---------------------------------------------------------------------
import sys
import os
import getpass
import hashlib
def passprompt():
pprompt = lambda: (getpass.getpass("Passphrase: "), getpass.getpass("Retype passphrase: "))
p1, p2 = pprompt()
while p1 != p2:
print("No match, please try again", file=sys.stderr)
p1, p2 = pprompt()
return p1
if len(sys.argv) <= 1:
print("Usage: {} <dklen>".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
hash_name = "sha256"
iterations = 300000
dklen = int(sys.argv[1])
salt = os.urandom(16)
password = passprompt().encode("utf-8")
dk = hashlib.pbkdf2_hmac(hash_name=hash_name, password=password, salt=salt, iterations=iterations, dklen=dklen)
salt_hex = "".join(["{:02x}".format(x) for x in salt])
dk_hex = "".join(["{:02x}".format(x) for x in dk])
print("KEY=" + dk_hex);
print("algoritm=pbkdf2_hmac");
print("hash_name=" + hash_name);
print("salt=" + salt_hex);
print("iterations=" + str(iterations));
| gpl-3.0 |
lindycoder/netman | netman/adapters/switches/util.py | 3 | 3299 | # Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from netman import regex
import re
class SubShell(object):
debug = False
def __init__(self, ssh, enter, exit_cmd, validate=None):
self.ssh = ssh
self.enter = enter
self.exit = exit_cmd
self.validate = validate or (lambda x: None)
def __enter__(self):
if isinstance(self.enter, list):
[self.validate(self.ssh.do(cmd)) for cmd in self.enter]
else:
self.validate(self.ssh.do(self.enter))
return self.ssh
def __exit__(self, eType, eValue, eTrace):
if self.debug and eType is not None:
logging.error("Subshell exception {}: {}\n{}"
.format(eType.__name__, eValue, "".join(traceback.format_tb(eTrace))))
self.ssh.do(self.exit)
def no_output(exc, *args):
def m(welcome_msg):
if len(welcome_msg) > 0:
raise exc(*args)
return m
def split_on_bang(data):
current_chunk = []
for line in data:
if re.match("^!.*", line):
if len(current_chunk) > 0:
yield current_chunk
current_chunk = []
else:
current_chunk.append(line)
def split_on_dedent(data):
current_chunk = []
for line in data:
if re.match("^[^\s].*", line) and len(current_chunk) > 0:
yield current_chunk
current_chunk = [line]
else:
current_chunk.append(line)
yield current_chunk
class ResultChecker(object):
def __init__(self, result=None):
self.result = result
def on_any_result(self, exception, *args, **kwargs):
if self.result and len(self.result) > 0:
raise exception(*args, **kwargs)
return self
def on_result_matching(self, matcher, exception, *args, **kwargs):
if regex.match(matcher, "\n".join(self.result), flags=re.DOTALL):
raise exception(*args, **kwargs)
return self
class PageReader(object):
def __init__(self, read_while, and_press, unless_prompt):
self.next_page_indicator = read_while
self.continue_key = and_press
self.prompt = unless_prompt
def do(self, shell, command):
result = shell.do(command,
wait_for=(self.next_page_indicator, self.prompt),
include_last_line=True)
while len(result) > 0 and self.next_page_indicator in result[-1]:
result = result[:-1] + shell.send_key(self.continue_key,
wait_for=(self.next_page_indicator, self.prompt),
include_last_line=True)
return result[:-1]
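# Usage sketch (illustrative; the pager string, key and prompt are
# hypothetical device values):
#
#   reader = PageReader(read_while="--More--", and_press="m",
#                       unless_prompt="hostname#")
#   lines = reader.do(shell, "show vlan")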
| apache-2.0 |
jiachenning/odoo | addons/portal/mail_mail.py | 320 | 2625 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv
from openerp.tools.translate import _
class mail_mail(osv.Model):
""" Update of mail_mail class, to add the signin URL to notifications. """
_inherit = 'mail.mail'
def _get_partner_access_link(self, cr, uid, mail, partner=None, context=None):
""" Generate URLs for links in mails:
            - partner is not a user: signup_url
            - partner is a user: fallback on classic URL
"""
if context is None:
context = {}
partner_obj = self.pool.get('res.partner')
if partner and not partner.user_ids:
            context_signup = dict(context, signup_valid=True)
            signup_url = partner_obj._get_signup_url_for_action(cr, SUPERUSER_ID, [partner.id],
                                                                action='mail.action_mail_redirect',
                                                                model=mail.model, res_id=mail.res_id,
                                                                context=context_signup)[partner.id]
return ", <span class='oe_mail_footer_access'><small>%(access_msg)s <a style='color:inherit' href='%(portal_link)s'>%(portal_msg)s</a></small></span>" % {
'access_msg': _('access directly to'),
'portal_link': signup_url,
'portal_msg': '%s %s' % (context.get('model_name', ''), mail.record_name) if mail.record_name else _('your messages '),
}
else:
return super(mail_mail, self)._get_partner_access_link(cr, uid, mail, partner=partner, context=context)
| agpl-3.0 |
zhangqifan/findSomething | FindSomething/Pods/AVOSCloudCrashReporting/Breakpad/src/tools/gyp/test/actions/gyptest-all.py | 243 | 3677 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import glob
import os
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_all')
test.run_gyp('actions.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Some gyp files use an action that mentions an output but never
# writes it as a means to making the action run on every build. That
# doesn't mesh well with ninja's semantics. TODO(evan): figure out
# how to work always-run actions in to ninja.
# Android also can't do this as it doesn't have order-only dependencies.
if test.format in ['ninja', 'android']:
test.build('actions.gyp', test.ALL, chdir='relocate/src')
else:
# Test that an "always run" action increases a counter on multiple
# invocations, and that a dependent action updates in step.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
# The "always run" action only counts to 2, but the dependent target
# will count forever if it's allowed to run. This verifies that the
# dependent target only runs when the "always run" action generates
# new output, not just because the "always run" ran.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
expect = """\
Hello from program.c
Hello from make-prog1.py
Hello from make-prog2.py
"""
if test.format == 'xcode':
chdir = 'relocate/src/subdir1'
else:
chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)
test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
expect = "Hello from generate_main.py\n"
if test.format == 'xcode':
chdir = 'relocate/src/subdir3'
else:
chdir = 'relocate/src'
test.run_built_executable('null_input', chdir=chdir, stdout=expect)
# Clean out files which may have been created if test.ALL was run.
def clean_dep_files():
for file in (glob.glob('relocate/src/dep_*.txt') +
glob.glob('relocate/src/deps_all_done_*.txt')):
if os.path.exists(file):
os.remove(file)
# Confirm our clean.
clean_dep_files()
test.must_not_exist('relocate/src/dep_1.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
# Make sure all deps finish before an action is run on a 'None' target.
# If using the Make builder, add -j to make things more difficult.
arguments = []
if test.format == 'make':
arguments = ['-j']
test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_123.txt')
# Try again with a target that has deps in reverse. Output files from
# previous tests deleted. Confirm this execution did NOT run the ALL
# target which would mess up our dep tests.
clean_dep_files()
test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_321.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
test.pass_test()
| mit |
openstack-ja/horizon | openstack_dashboard/openstack/common/notifier/log_notifier.py | 19 | 1297 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from openstack_dashboard.openstack.common import jsonutils
from openstack_dashboard.openstack.common import log as logging
CONF = cfg.CONF
def notify(_context, message):
"""Notifies the recipient of the desired event given the model.
Log notifications using openstack's default logging system.
"""
priority = message.get('priority',
CONF.default_notification_level)
priority = priority.lower()
logger = logging.getLogger(
'openstack_dashboard.openstack.common.notification.%s' %
message['event_type'])
getattr(logger, priority)(jsonutils.dumps(message))
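# Illustrative call (hypothetical event payload; 'priority' falls back to
# CONF.default_notification_level when omitted):
#
#   notify(None, {'event_type': 'compute.instance.create',
#                 'priority': 'INFO',
#                 'payload': {'instance_id': '42'}})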
| apache-2.0 |
puckipedia/youtube-dl | youtube_dl/extractor/brightcove.py | 89 | 15403 | # encoding: utf-8
from __future__ import unicode_literals
import re
import json
import xml.etree.ElementTree
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_str,
compat_urllib_parse,
compat_urllib_parse_urlparse,
compat_urllib_request,
compat_urlparse,
compat_xml_parse_error,
)
from ..utils import (
determine_ext,
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
unescapeHTML,
unsmuggle_url,
)
class BrightcoveIE(InfoExtractor):
_VALID_URL = r'(?:https?://.*brightcove\.com/(services|viewer).*?\?|brightcove:)(?P<query>.*)'
_FEDERATED_URL_TEMPLATE = 'http://c.brightcove.com/services/viewer/htmlFederated?%s'
_TESTS = [
{
# From http://www.8tv.cat/8aldia/videos/xavier-sala-i-martin-aquesta-tarda-a-8-al-dia/
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1654948606001&flashID=myExperience&%40videoPlayer=2371591881001',
'md5': '5423e113865d26e40624dce2e4b45d95',
'note': 'Test Brightcove downloads and detection in GenericIE',
'info_dict': {
'id': '2371591881001',
'ext': 'mp4',
'title': 'Xavier Sala i Martín: “Un banc que no presta és un banc zombi que no serveix per a res”',
'uploader': '8TV',
'description': 'md5:a950cc4285c43e44d763d036710cd9cd',
}
},
{
# From http://medianetwork.oracle.com/video/player/1785452137001
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1217746023001&flashID=myPlayer&%40videoPlayer=1785452137001',
'info_dict': {
'id': '1785452137001',
'ext': 'flv',
'title': 'JVMLS 2012: Arrays 2.0 - Opportunities and Challenges',
'description': 'John Rose speaks at the JVM Language Summit, August 1, 2012.',
'uploader': 'Oracle',
},
},
{
# From http://mashable.com/2013/10/26/thermoelectric-bracelet-lets-you-control-your-body-temperature/
'url': 'http://c.brightcove.com/services/viewer/federated_f9?&playerID=1265504713001&publisherID=AQ%7E%7E%2CAAABBzUwv1E%7E%2CxP-xFHVUstiMFlNYfvF4G9yFnNaqCw_9&videoID=2750934548001',
'info_dict': {
'id': '2750934548001',
'ext': 'mp4',
'title': 'This Bracelet Acts as a Personal Thermostat',
'description': 'md5:547b78c64f4112766ccf4e151c20b6a0',
'uploader': 'Mashable',
},
},
{
# test that the default referer works
# from http://national.ballet.ca/interact/video/Lost_in_Motion_II/
'url': 'http://link.brightcove.com/services/player/bcpid756015033001?bckey=AQ~~,AAAApYJi_Ck~,GxhXCegT1Dp39ilhXuxMJxasUhVNZiil&bctid=2878862109001',
'info_dict': {
'id': '2878862109001',
'ext': 'mp4',
'title': 'Lost in Motion II',
'description': 'md5:363109c02998fee92ec02211bd8000df',
'uploader': 'National Ballet of Canada',
},
},
{
# test flv videos served by akamaihd.net
# From http://www.redbull.com/en/bike/stories/1331655643987/replay-uci-dh-world-cup-2014-from-fort-william
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?%40videoPlayer=ref%3ABC2996102916001&linkBaseURL=http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fvideos%2F1331655630249%2Freplay-uci-fort-william-2014-dh&playerKey=AQ%7E%7E%2CAAAApYJ7UqE%7E%2Cxqr_zXk0I-zzNndy8NlHogrCb5QdyZRf&playerID=1398061561001#__youtubedl_smuggle=%7B%22Referer%22%3A+%22http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fstories%2F1331655643987%2Freplay-uci-dh-world-cup-2014-from-fort-william%22%7D',
# The md5 checksum changes on each download
'info_dict': {
'id': '2996102916001',
'ext': 'flv',
'title': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals',
'uploader': 'Red Bull TV',
'description': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals',
},
},
{
# playlist test
# from http://support.brightcove.com/en/video-cloud/docs/playlist-support-single-video-players
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=3550052898001&playerKey=AQ%7E%7E%2CAAABmA9XpXk%7E%2C-Kp7jNgisre1fG5OdqpAFUTcs0lP_ZoL',
'info_dict': {
'title': 'Sealife',
'id': '3550319591001',
},
'playlist_mincount': 7,
},
]
@classmethod
def _build_brighcove_url(cls, object_str):
"""
        Build a Brightcove URL from an XML string containing
<object class="BrightcoveExperience">{params}</object>
"""
# Fix up some stupid HTML, see https://github.com/rg3/youtube-dl/issues/1553
object_str = re.sub(r'(<param(?:\s+[a-zA-Z0-9_]+="[^"]*")*)>',
lambda m: m.group(1) + '/>', object_str)
# Fix up some stupid XML, see https://github.com/rg3/youtube-dl/issues/1608
object_str = object_str.replace('<--', '<!--')
# remove namespace to simplify extraction
object_str = re.sub(r'(<object[^>]*)(xmlns=".*?")', r'\1', object_str)
object_str = fix_xml_ampersands(object_str)
try:
object_doc = xml.etree.ElementTree.fromstring(object_str.encode('utf-8'))
except compat_xml_parse_error:
return
fv_el = find_xpath_attr(object_doc, './param', 'name', 'flashVars')
if fv_el is not None:
flashvars = dict(
(k, v[0])
for k, v in compat_parse_qs(fv_el.attrib['value']).items())
else:
flashvars = {}
def find_param(name):
if name in flashvars:
return flashvars[name]
node = find_xpath_attr(object_doc, './param', 'name', name)
if node is not None:
return node.attrib['value']
return None
params = {}
playerID = find_param('playerID')
if playerID is None:
raise ExtractorError('Cannot find player ID')
params['playerID'] = playerID
playerKey = find_param('playerKey')
# Not all pages define this value
if playerKey is not None:
params['playerKey'] = playerKey
# The three fields hold the id of the video
videoPlayer = find_param('@videoPlayer') or find_param('videoId') or find_param('videoID')
if videoPlayer is not None:
params['@videoPlayer'] = videoPlayer
linkBase = find_param('linkBaseURL')
if linkBase is not None:
params['linkBaseURL'] = linkBase
return cls._make_brightcove_url(params)
@classmethod
def _build_brighcove_url_from_js(cls, object_js):
# The layout of JS is as follows:
# customBC.createVideo = function (width, height, playerID, playerKey, videoPlayer, VideoRandomID) {
# // build Brightcove <object /> XML
# }
m = re.search(
            r'''(?x)customBC\.createVideo\(
.*? # skipping width and height
["\'](?P<playerID>\d+)["\']\s*,\s* # playerID
["\'](?P<playerKey>AQ[^"\']{48})[^"\']*["\']\s*,\s* # playerKey begins with AQ and is 50 characters
# in length, however it's appended to itself
# in places, so truncate
["\'](?P<videoID>\d+)["\'] # @videoPlayer
''', object_js)
if m:
return cls._make_brightcove_url(m.groupdict())
@classmethod
def _make_brightcove_url(cls, params):
data = compat_urllib_parse.urlencode(params)
return cls._FEDERATED_URL_TEMPLATE % data
@classmethod
def _extract_brightcove_url(cls, webpage):
"""Try to extract the brightcove url from the webpage, returns None
if it can't be found
"""
urls = cls._extract_brightcove_urls(webpage)
return urls[0] if urls else None
@classmethod
def _extract_brightcove_urls(cls, webpage):
"""Return a list of all Brightcove URLs from the webpage """
url_m = re.search(
r'<meta\s+property=[\'"]og:video[\'"]\s+content=[\'"](https?://(?:secure|c)\.brightcove.com/[^\'"]+)[\'"]',
webpage)
if url_m:
url = unescapeHTML(url_m.group(1))
# Some sites don't add it, we can't download with this url, for example:
# http://www.ktvu.com/videos/news/raw-video-caltrain-releases-video-of-man-almost/vCTZdY/
if 'playerKey' in url or 'videoId' in url:
return [url]
matches = re.findall(
r'''(?sx)<object
(?:
[^>]+?class=[\'"][^>]*?BrightcoveExperience.*?[\'"] |
[^>]*?>\s*<param\s+name="movie"\s+value="https?://[^/]*brightcove\.com/
).+?>\s*</object>''',
webpage)
if matches:
return list(filter(None, [cls._build_brighcove_url(m) for m in matches]))
return list(filter(None, [
cls._build_brighcove_url_from_js(custom_bc)
for custom_bc in re.findall(r'(customBC\.createVideo\(.+?\);)', webpage)]))
def _real_extract(self, url):
url, smuggled_data = unsmuggle_url(url, {})
# Change the 'videoId' and others field to '@videoPlayer'
url = re.sub(r'(?<=[?&])(videoI(d|D)|bctid)', '%40videoPlayer', url)
# Change bckey (used by bcove.me urls) to playerKey
url = re.sub(r'(?<=[?&])bckey', 'playerKey', url)
mobj = re.match(self._VALID_URL, url)
query_str = mobj.group('query')
query = compat_urlparse.parse_qs(query_str)
videoPlayer = query.get('@videoPlayer')
if videoPlayer:
# We set the original url as the default 'Referer' header
referer = smuggled_data.get('Referer', url)
return self._get_video_info(
videoPlayer[0], query_str, query, referer=referer)
elif 'playerKey' in query:
player_key = query['playerKey']
return self._get_playlist_info(player_key[0])
else:
raise ExtractorError(
'Cannot find playerKey= variable. Did you forget quotes in a shell invocation?',
expected=True)
def _get_video_info(self, video_id, query_str, query, referer=None):
request_url = self._FEDERATED_URL_TEMPLATE % query_str
req = compat_urllib_request.Request(request_url)
linkBase = query.get('linkBaseURL')
if linkBase is not None:
referer = linkBase[0]
if referer is not None:
req.add_header('Referer', referer)
webpage = self._download_webpage(req, video_id)
error_msg = self._html_search_regex(
r"<h1>We're sorry.</h1>([\s\n]*<p>.*?</p>)+", webpage,
'error message', default=None)
if error_msg is not None:
raise ExtractorError(
'brightcove said: %s' % error_msg, expected=True)
self.report_extraction(video_id)
info = self._search_regex(r'var experienceJSON = ({.*});', webpage, 'json')
info = json.loads(info)['data']
video_info = info['programmedContent']['videoPlayer']['mediaDTO']
video_info['_youtubedl_adServerURL'] = info.get('adServerURL')
return self._extract_video_info(video_info)
def _get_playlist_info(self, player_key):
info_url = 'http://c.brightcove.com/services/json/experience/runtime/?command=get_programming_for_experience&playerKey=%s' % player_key
playlist_info = self._download_webpage(
info_url, player_key, 'Downloading playlist information')
json_data = json.loads(playlist_info)
if 'videoList' not in json_data:
raise ExtractorError('Empty playlist')
playlist_info = json_data['videoList']
videos = [self._extract_video_info(video_info) for video_info in playlist_info['mediaCollectionDTO']['videoDTOs']]
return self.playlist_result(videos, playlist_id='%s' % playlist_info['id'],
playlist_title=playlist_info['mediaCollectionDTO']['displayName'])
def _extract_video_info(self, video_info):
info = {
'id': compat_str(video_info['id']),
'title': video_info['displayName'].strip(),
'description': video_info.get('shortDescription'),
'thumbnail': video_info.get('videoStillURL') or video_info.get('thumbnailURL'),
'uploader': video_info.get('publisherName'),
}
renditions = video_info.get('renditions')
if renditions:
formats = []
for rend in renditions:
url = rend['defaultURL']
if not url:
continue
ext = None
if rend['remote']:
url_comp = compat_urllib_parse_urlparse(url)
if url_comp.path.endswith('.m3u8'):
formats.extend(
self._extract_m3u8_formats(url, info['id'], 'mp4'))
continue
elif 'akamaihd.net' in url_comp.netloc:
# This type of renditions are served through
# akamaihd.net, but they don't use f4m manifests
url = url.replace('control/', '') + '?&v=3.3.0&fp=13&r=FEEFJ&g=RTSJIMBMPFPB'
ext = 'flv'
if ext is None:
ext = determine_ext(url)
size = rend.get('size')
formats.append({
'url': url,
'ext': ext,
'height': rend.get('frameHeight'),
'width': rend.get('frameWidth'),
'filesize': size if size != 0 else None,
})
self._sort_formats(formats)
info['formats'] = formats
elif video_info.get('FLVFullLengthURL') is not None:
info.update({
'url': video_info['FLVFullLengthURL'],
})
if self._downloader.params.get('include_ads', False):
adServerURL = video_info.get('_youtubedl_adServerURL')
if adServerURL:
ad_info = {
'_type': 'url',
'url': adServerURL,
}
if 'url' in info:
return {
'_type': 'playlist',
'title': info['title'],
'entries': [ad_info, info],
}
else:
return ad_info
if 'url' not in info and not info.get('formats'):
raise ExtractorError('Unable to extract video url for %s' % info['id'])
return info
| unlicense |
vvv1559/intellij-community | python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py | 326 | 1699 | """
Optional fixer to transform set() calls to set literals.
"""
# Author: Benjamin Peterson
from lib2to3 import fixer_base, pytree
from lib2to3.fixer_util import token, syms
class FixSetLiteral(fixer_base.BaseFix):
BM_compatible = True
explicit = True
PATTERN = """power< 'set' trailer< '('
(atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
|
single=any) ']' >
|
atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
)
')' > >
"""
def transform(self, node, results):
single = results.get("single")
if single:
# Make a fake listmaker
fake = pytree.Node(syms.listmaker, [single.clone()])
single.replace(fake)
items = fake
else:
items = results["items"]
# Build the contents of the literal
literal = [pytree.Leaf(token.LBRACE, u"{")]
literal.extend(n.clone() for n in items.children)
literal.append(pytree.Leaf(token.RBRACE, u"}"))
# Set the prefix of the right brace to that of the ')' or ']'
literal[-1].prefix = items.next_sibling.prefix
maker = pytree.Node(syms.dictsetmaker, literal)
maker.prefix = node.prefix
# If the original was a one tuple, we need to remove the extra comma.
if len(maker.children) == 4:
n = maker.children[2]
n.remove()
maker.children[-1].prefix = n.prefix
# Finally, replace the set call with our shiny new literal.
return maker
| apache-2.0 |
AlexRobson/nilmtk | nilmtk/datastore/key.py | 6 | 1916 | from __future__ import print_function, division
class Key(object):
"""A location of data or metadata within NILMTK.
Attributes
----------
building : int
meter : int
utility : str
"""
def __init__(self, string=None, building=None, meter=None):
"""
Parameters
----------
string : str, optional
e.g. 'building1/elec/meter1'
building : int, optional
meter : int, optional
"""
self.utility = None
if string is None:
self.building = building
self.meter = meter
else:
split = string.strip('/').split('/')
assert split[0].startswith('building'), "The first element must be 'building<I>', e.g. 'building1'; not '{}'.".format(split[0])
try:
self.building = int(split[0].replace("building", ""))
except ValueError as e:
raise ValueError("'building' must be followed by an integer.\n{}"
.format(e))
if len(split) > 1:
self.utility = split[1]
if len(split) == 3:
assert split[2].startswith('meter')
self.meter = int(split[-1].replace("meter", ""))
else:
self.meter = None
self._check()
def _check(self):
assert isinstance(self.building, int)
assert self.building >= 1
if self.meter is not None:
assert isinstance(self.meter, int)
assert self.meter >= 1
def __repr__(self):
self._check()
s = "/building{:d}".format(self.building)
if self.meter is not None:
s += "/elec/meter{:d}".format(self.meter)
return s
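if __name__ == '__main__':
    # Illustrative round trips (a sketch, not part of the original module):
    assert str(Key('building1/elec/meter1')) == '/building1/elec/meter1'
    assert str(Key(building=2, meter=3)) == '/building2/elec/meter3'
    print(Key('building3'))  # -> /building3 (no meter section)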
| apache-2.0 |
credativ/gofer | src/gofer/agent/config.py | 1 | 4642 | #
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU Lesser General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (LGPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of LGPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/lgpl-2.0.txt.
#
# Jeff Ortel <[email protected]>
#
from gofer import NAME, Singleton
from gofer.config import Config, Graph
from gofer.config import REQUIRED, OPTIONAL, ANY, BOOL, NUMBER
#
# [management]
# enabled
# The manager is (enabled|disabled).
# host
# Host (name or IP) the manager listens on.
# port
# The port number the manager listens on.
#
# [logging]
# <module>
# Logging level
#
# [pam]
# service
# The default PAM service for authentication. Default:passwd
#
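# An agent.conf matching this schema might look like (illustrative,
# mirroring the defaults below):
#
#   [management]
#   enabled=1
#   host=localhost
#   port=5650
#
#   [logging]
#
#   [pam]
#   service=passwd
#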
AGENT_SCHEMA = (
('management', REQUIRED,
(
('enabled', OPTIONAL, BOOL),
('host', OPTIONAL, ANY),
('port', OPTIONAL, NUMBER),
)
),
('logging', REQUIRED,
[]
),
('pam', REQUIRED,
(
('service', OPTIONAL, ANY),
)
),
)
#
# [main]
#
# enabled
# Plugin enabled/disabled (0|1)
# name
# The (optional) plugin name. The basename of the descriptor is used when not specified.
# plugin
# The (optional) fully qualified module to be loaded from the PYTHON path.
# threads
# The (optional) number of threads for the RMI dispatcher.
# accept
# Accept forwarding from. A comma (,) separated list of plugin names (,=none|*=all).
# forward
# Forward to. A comma (,) separated list of plugin names (,=none|*=all).
#
# [messaging]
#
# uuid
# The (optional) agent identity. This value also specifies the queue name.
# url
# The (optional) broker connection URL.
# cacert
# The (optional) SSL CA certificate used to validate the server certificate.
# clientcert
# The (optional) SSL client certificate. PEM encoded and contains both key and certificate.
# host_validation
# The (optional) flag indicates SSL host validation should be performed.
# authenticator
# The (optional) fully qualified Authenticator to be loaded from the PYTHON path.
#
# [model]
#
# managed
# The (optional) level of broker model management. Default: 2.
# - 0 = none
# - 1 = declare and bind queue.
# - 2 = declare and bind queue; drain and delete queue on explicit detach.
# queue
# The (optional) AMQP queue name. This has precedent over uuid.
# Format: <exchange>/<queue> where *exchange* is optional.
# expiration
# The (optional) auto-deleted queue expiration (seconds).
#
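# A plugin descriptor matching this schema might look like (illustrative
# values throughout):
#
#   [main]
#   enabled=1
#   name=demo
#   threads=3
#
#   [messaging]
#   uuid=demo-agent
#   url=amqp://broker.example.com
#
#   [model]
#   managed=2
#   expiration=10
#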
PLUGIN_SCHEMA = (
('main', REQUIRED,
(
('enabled', REQUIRED, BOOL),
('name', OPTIONAL, ANY),
('plugin', OPTIONAL, ANY),
('threads', OPTIONAL, NUMBER),
('accept', OPTIONAL, ANY),
('forward', OPTIONAL, ANY),
)
),
('messaging', REQUIRED,
(
('url', OPTIONAL, ANY),
('uuid', OPTIONAL, ANY),
('cacert', OPTIONAL, ANY),
('clientcert', OPTIONAL, ANY),
('clientkey', OPTIONAL, ANY),
('host_validation', OPTIONAL, BOOL),
('authenticator', OPTIONAL, ANY),
)
),
('model', OPTIONAL,
(
('managed', OPTIONAL, '(0|1|2)'),
('queue', OPTIONAL, ANY),
('expiration', OPTIONAL, NUMBER)
)
),
)
AGENT_DEFAULTS = {
'management': {
'enabled': '0',
'host': 'localhost',
'port': '5650',
},
'logging': {
},
'pam': {
'service': 'passwd'
}
}
PLUGIN_DEFAULTS = {
'main': {
'enabled': '0',
'threads': '1',
'accept': ',',
'forward': ','
},
'messaging': {
},
'model': {
'managed': '2'
}
}
class AgentConfig(Graph):
"""
The gofer agent configuration.
    :cvar PATH: The absolute path to the agent configuration file.
:type PATH: str
"""
__metaclass__ = Singleton
PATH = '/etc/%s/agent.conf' % NAME
def __init__(self, path=None):
"""
Read the configuration.
"""
conf = Config(AGENT_DEFAULTS, path or AgentConfig.PATH)
conf.validate(AGENT_SCHEMA)
Graph.__init__(self, conf)
| lgpl-2.1 |
damdam-s/project-service | service_desk/project.py | 23 | 3700 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 Daniel Reis
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
class ProjectProject(orm.Model):
_inherit = 'project.project'
_columns = {
'use_analytic_account': fields.selection(
[('no', 'No'), ('yes', 'Optional'), ('req', 'Required')],
'Use Analytic Account'),
}
_defaults = {
'use_analytic_account': 'no',
}
class ProjectTask(orm.Model):
"""
Add related ``Analytic Account`` and service ``Location``.
A Location can be any Contact Partner of the AA's Partner.
Other logic is possible, such as maintaining a specific list of service
addresses for each Contract, but that's out of scope here -
modules implementing these other possibilities are very welcome.
"""
_inherit = 'project.task'
_columns = {
'analytic_account_id': fields.many2one(
'account.analytic.account', 'Contract/Analytic',
domain="[('type','in',['normal','contract'])]"),
'location_id': fields.many2one(
'res.partner', 'Location',
domain="[('parent_id','child_of',partner_id)]"),
'use_analytic_account': fields.related(
'project_id', 'use_analytic_account',
type='char', string="Use Analytic Account"),
'project_code': fields.related(
'project_id', 'code', type='char', string="Project Code"),
}
def onchange_project(self, cr, uid, id, project_id, context=None):
# on_change is necessary to populate fields on Create, before saving
try:
# try applying a parent's onchange, may it exist
res = super(ProjectTask, self).onchange_project(
cr, uid, id, project_id, context=context) or {}
except AttributeError:
res = {}
if project_id:
obj = self.pool.get('project.project').browse(
cr, uid, project_id, context=context)
res.setdefault('value', {})
res['value']['use_analytic_account'] = (
obj.use_analytic_account or 'no')
return res
def onchange_analytic(self, cr, uid, id, analytic_id, context=None):
res = {}
model = self.pool.get('account.analytic.account')
obj = model.browse(cr, uid, analytic_id, context=context)
if obj:
# "contact_id" and "department_id" may be provided by other modules
fldmap = [ # analytic_account field -> task field
('partner_id', 'partner_id'),
('contact_id', 'location_id'),
('department_id', 'department_id')]
res['value'] = {dest: getattr(obj, orig).id
for orig, dest in fldmap
if hasattr(obj, orig) and getattr(obj, orig)}
return res
| agpl-3.0 |
maciek263/django2 | myvenv/Lib/site-packages/django/contrib/gis/forms/widgets.py | 422 | 3659 | from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six, translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
"""
The base class for rich geometry widgets.
Renders a map using the WKT of the geometry.
"""
geom_type = 'GEOMETRY'
map_srid = 4326
map_width = 600
map_height = 400
display_raw = False
supports_3d = False
template_name = '' # set on subclasses
def __init__(self, attrs=None):
self.attrs = {}
for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_raw'):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)
def serialize(self, value):
return value.wkt if value else ''
def deserialize(self, value):
try:
return GEOSGeometry(value, self.map_srid)
except (GEOSException, ValueError) as err:
logger.error(
"Error creating geometry from value '%s' (%s)" % (
value, err)
)
return None
def render(self, name, value, attrs=None):
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if isinstance(value, six.string_types):
value = self.deserialize(value)
if value:
# Check that srid of value and map match
if value.srid != self.map_srid:
try:
ogr = value.ogr
ogr.transform(self.map_srid)
value = ogr
except gdal.GDALException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)" % (
value.srid, self.map_srid, err)
)
context = self.build_attrs(
attrs,
name=name,
module='geodjango_%s' % name.replace('-', '_'), # JS-safe
serialized=self.serialize(value),
geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
STATIC_URL=settings.STATIC_URL,
LANGUAGE_BIDI=translation.get_language_bidi(),
)
return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
template_name = 'gis/openlayers.html'
class Media:
js = (
'http://openlayers.org/api/2.13/OpenLayers.js',
'gis/js/OLMapWidget.js',
)
class OSMWidget(BaseGeometryWidget):
"""
An OpenLayers/OpenStreetMap-based widget.
"""
template_name = 'gis/openlayers-osm.html'
default_lon = 5
default_lat = 47
class Media:
js = (
'http://openlayers.org/api/2.13/OpenLayers.js',
'http://www.openstreetmap.org/openlayers/OpenStreetMap.js',
'gis/js/OLMapWidget.js',
)
def __init__(self, attrs=None):
super(OSMWidget, self).__init__()
for key in ('default_lon', 'default_lat'):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)
@property
def map_srid(self):
# Use the official spherical mercator projection SRID when GDAL is
# available; otherwise, fallback to 900913.
if gdal.HAS_GDAL:
return 3857
else:
return 900913
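# Usage sketch (illustrative): rendering a point field with the OSM widget
# in a plain form; assumes django.contrib.gis.forms re-exports the standard
# form and field classes.
#
#   from django.contrib.gis import forms as gis_forms
#
#   class PlaceForm(gis_forms.Form):
#       point = gis_forms.PointField(widget=OSMWidget)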
| mit |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/third_party/boto/tests/integration/s3/test_cert_verification.py | 126 | 1532 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on SQS endpoints validate.
"""
import unittest
from tests.integration import ServiceCertVerificationTest
import boto.s3
class S3CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
s3 = True
regions = boto.s3.regions()
def sample_service_call(self, conn):
conn.get_all_buckets()
| gpl-3.0 |
VanirAOSP/external_chromium-trace | trace-viewer/third_party/pywebsocket/src/example/bench_wsh.py | 495 | 2322 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple load tester for WebSocket clients.
A client program sends a message formatted as "<time> <count> <message>" to
this handler. This handler starts sending total <count> WebSocket messages
containing <message> every <time> seconds. <time> can be a floating point
value. <count> must be an integer value.
"""
import time
def web_socket_do_extra_handshake(request):
pass # Always accept.
def web_socket_transfer_data(request):
line = request.ws_stream.receive_message()
parts = line.split(' ')
if len(parts) != 3:
raise ValueError('Bad parameter format')
wait = float(parts[0])
count = int(parts[1])
message = parts[2]
for i in xrange(count):
request.ws_stream.send_message(message)
time.sleep(wait)
# vi:sts=4 sw=4 et
| bsd-3-clause |
tensorflow/probability | tensorflow_probability/python/internal/hypothesis_testlib_test.py | 1 | 1816 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Testing the TFP Hypothesis strategies.
(As opposed to using them to test other things).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import hypothesis as hp
from hypothesis import strategies as hps
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import hypothesis_testlib as tfp_hps
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class HypothesisTestlibTest(test_util.TestCase):
@parameterized.parameters((support,) for support in tfp_hps.ALL_SUPPORTS)
@hp.given(hps.data())
@tfp_hps.tfp_hp_settings()
def testTensorsInSupportsAlwaysFinite(self, support, data):
try:
result_ = data.draw(tfp_hps.tensors_in_support(support))
except NotImplementedError:
# Constraint class doesn't have a constrainer function at all, so this
# test is moot.
return
result = self.evaluate(result_)
self.assertTrue(np.all(np.isfinite(result)))
if __name__ == '__main__':
tf.test.main()
| apache-2.0 |
JohnGeorgiadis/invenio | invenio/legacy/bibsched/bibtasklet.py | 17 | 5098 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio Bibliographic Tasklet BibTask.
This is a particular BibTask that executes tasklets, which can be any
function dropped into ``<package>.tasklets`` where ``<package>`` is defined
in ``PACKAGES``.
"""
from __future__ import print_function
import sys
from invenio.version import __version__
from invenio.legacy.bibsched.bibtask import (
task_init, write_message, task_set_option, task_get_option,
task_update_progress)
from invenio.utils.autodiscovery.helpers import get_callable_documentation
from invenio.utils.autodiscovery.checkers import check_arguments_compatibility
from invenio.modules.scheduler.registry import tasklets
_TASKLETS = tasklets
def cli_list_tasklets():
"""Print the list of available tasklets and broken tasklets."""
print("""Available tasklets:""")
for tasklet in _TASKLETS.values():
print(get_callable_documentation(tasklet))
sys.exit(0)
def task_submit_elaborate_specific_parameter(key, value,
dummy_opts, dummy_args):
"""Check meaning of given string key.
Eventually use the value for check. Usually it fills some key in the
    options dict. It must return True if it has elaborated the key, or False
    if it doesn't know that key.
Example:
.. code-block:: python
if key in ('-n', '--number'):
task_set_option('number', value)
return True
return False
"""
if key in ('-T', '--tasklet'):
task_set_option('tasklet', value)
return True
elif key in ('-a', '--argument'):
arguments = task_get_option('arguments', {})
try:
key, value = value.split('=', 1)
        except ValueError:
print('ERROR: an argument must be in the form '
'param=value, not "%s"' % (value, ),
file=sys.stderr)
return False
arguments[key] = value
task_set_option('arguments', arguments)
return True
elif key in ('-l', '--list-tasklets'):
cli_list_tasklets()
return True
return False
def task_submit_check_options():
"""Check if a tasklet has been specified and the parameters are good."""
tasklet = task_get_option('tasklet', None)
arguments = task_get_option('arguments', {})
if not tasklet:
print('ERROR: no tasklet specified', file=sys.stderr)
return False
elif tasklet not in _TASKLETS:
print('ERROR: "%s" is not a valid tasklet. Use '
'--list-tasklets to obtain a list of the working tasklets.' %
tasklet, file=sys.stderr)
return False
else:
try:
check_arguments_compatibility(_TASKLETS[tasklet], arguments)
except ValueError as err:
print('ERROR: wrong arguments (%s) specified for '
'tasklet "%s": %s' % (
arguments, tasklet, err), file=sys.stderr)
return False
return True
def task_run_core():
"""Run the specific tasklet."""
tasklet = task_get_option('tasklet')
arguments = task_get_option('arguments', {})
write_message('Starting tasklet "%s" (with arguments %s)' % (
tasklet, arguments))
task_update_progress('%s started' % tasklet)
ret = _TASKLETS[tasklet](**arguments)
task_update_progress('%s finished' % tasklet)
write_message('Finished tasklet "%s" (with arguments %s)' % (
tasklet, arguments))
if ret is not None:
return ret
return True
def main():
"""Main body of bibtasklet."""
task_init(
authorization_action='runbibtasklet',
authorization_msg="BibTaskLet Task Submission",
help_specific_usage="""\
-T, --tasklet Execute the specific tasklet
-a, --argument Specify an argument to be passed to tasklet in the form
param=value, e.g. --argument foo=bar
-l, --list-tasklets List the existing tasklets
""",
version=__version__,
specific_params=("T:a:l", ["tasklet=", "argument=", "list-tasklets"]),
task_submit_elaborate_specific_parameter_fnc=(
task_submit_elaborate_specific_parameter
),
task_run_fnc=task_run_core,
task_submit_check_options_fnc=task_submit_check_options)
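# Illustrative invocation (the tasklet name and argument are hypothetical):
#
#   $ bibtasklet -T bst_example -a "param=value" -a "other=1"
#   $ bibtasklet --list-tasklets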
| gpl-2.0 |
Fizzadar/pyinfra | pyinfra_cli/inventory.py | 1 | 5989 | from os import listdir, path
from types import GeneratorType
import six
from pyinfra import logger, pseudo_inventory
from pyinfra.api.inventory import Inventory
from pyinfra_cli.util import exec_file
# Hosts in an inventory can be just the hostname or a tuple (hostname, data)
ALLOWED_HOST_TYPES = tuple(
six.string_types + (tuple,),
)
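# An inventory file exercising both allowed forms might contain
# (illustrative hostnames and data):
#
#   web_servers = [
#       'web1.example.com',
#       ('web2.example.com', {'ssh_user': 'admin'}),
#   ]
#   db_servers = (['db1.example.com'], {'env': 'production'})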
def _is_inventory_group(key, value):
'''
Verify that a module-level variable (key = value) is a valid inventory group.
'''
if (
key.startswith('_')
or not isinstance(value, (list, tuple, GeneratorType))
):
return False
# If the group is a tuple of (hosts, data), check the hosts
if isinstance(value, tuple):
value = value[0]
# Expand any generators of hosts
if isinstance(value, GeneratorType):
value = list(value)
return all(
isinstance(item, ALLOWED_HOST_TYPES)
for item in value
)
def _get_group_data(deploy_dir):
group_data = {}
group_data_directory = path.join(deploy_dir, 'group_data')
if path.exists(group_data_directory):
files = listdir(group_data_directory)
for file in files:
if not file.endswith('.py'):
continue
group_data_file = path.join(group_data_directory, file)
group_name = path.basename(file)[:-3]
logger.debug('Looking for group data in: {0}'.format(group_data_file))
# Read the files locals into a dict
attrs = exec_file(group_data_file, return_locals=True)
keys = attrs.get('__all__', attrs.keys())
group_data[group_name] = {
key: value
for key, value in six.iteritems(attrs)
if key in keys and not key.startswith('_')
}
return group_data
def _get_groups_from_filename(inventory_filename):
attrs = exec_file(inventory_filename, return_locals=True)
return {
key: value
for key, value in six.iteritems(attrs)
if _is_inventory_group(key, value)
}
def make_inventory(
inventory_filename,
deploy_dir=None,
ssh_port=None,
ssh_user=None,
ssh_key=None,
ssh_key_password=None,
ssh_password=None,
winrm_username=None,
winrm_password=None,
winrm_port=None,
winrm_transport=None,
):
'''
    Builds a ``pyinfra.api.Inventory`` from the filesystem. If the file does
    not exist, the string is treated as a comma-separated list of hostnames.
'''
if ssh_port is not None:
ssh_port = int(ssh_port)
file_groupname = None
# If we're not a valid file we assume a list of comma separated hostnames
if not path.exists(inventory_filename):
groups = {
'all': inventory_filename.split(','),
}
else:
groups = _get_groups_from_filename(inventory_filename)
# Used to set all the hosts to an additional group - that of the filename
# ie inventories/dev.py means all the hosts are in the dev group, if not present
file_groupname = path.basename(inventory_filename).rsplit('.', 1)[0]
all_data = {}
if 'all' in groups:
all_hosts = groups.pop('all')
if isinstance(all_hosts, tuple):
all_hosts, all_data = all_hosts
# Build all out of the existing hosts if not defined
else:
all_hosts = []
for hosts in groups.values():
# Groups can be a list of hosts or tuple of (hosts, data)
hosts = hosts[0] if isinstance(hosts, tuple) else hosts
for host in hosts:
# Hosts can be a hostname or tuple of (hostname, data)
hostname = host[0] if isinstance(host, tuple) else host
if hostname not in all_hosts:
all_hosts.append(hostname)
groups['all'] = (all_hosts, all_data)
# Apply the filename group if not already defined
if file_groupname and file_groupname not in groups:
groups[file_groupname] = all_hosts
# In pyinfra an inventory is a combination of (hostnames + data). However, in CLI
    # mode we want to define this in separate files (inventory / group data). The
# issue is we want inventory access within the group data files - but at this point
# we're not ready to make an Inventory. So here we just create a fake one, and
# attach it to pseudo_inventory while we import the data files.
logger.debug('Creating fake inventory...')
fake_groups = {
# In API mode groups *must* be tuples of (hostnames, data)
name: group if isinstance(group, tuple) else (group, {})
for name, group in six.iteritems(groups)
}
fake_inventory = Inventory((all_hosts, all_data), **fake_groups)
pseudo_inventory.set(fake_inventory)
# Get all group data (group_data/*.py)
group_data = _get_group_data(deploy_dir)
# Reset the pseudo inventory
pseudo_inventory.reset()
# For each group load up any data
for name, hosts in six.iteritems(groups):
data = {}
if isinstance(hosts, tuple):
hosts, data = hosts
if name in group_data:
data.update(group_data.pop(name))
# Attach to group object
groups[name] = (hosts, data)
# Loop back through any leftover group data and create an empty (for now)
# group - this is because inventory @connectors can attach arbitrary groups
# to hosts, so we need to support that.
for name, data in six.iteritems(group_data):
groups[name] = ([], data)
return Inventory(
groups.pop('all'),
ssh_user=ssh_user,
ssh_key=ssh_key,
ssh_key_password=ssh_key_password,
ssh_port=ssh_port,
ssh_password=ssh_password,
winrm_username=winrm_username,
winrm_password=winrm_password,
winrm_port=winrm_port,
winrm_transport=winrm_transport,
**groups
), file_groupname and file_groupname.lower()
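# Usage sketch (paths and hostnames are hypothetical):
#     inventory, group = make_inventory('inventories/dev.py', deploy_dir='.')
#     inventory, group = make_inventory('web-1.example.net,web-2.example.net',
#                                       deploy_dir='.')
# The second form works because a name that isn't an existing file is treated
# as a comma-separated list of hostnames (see above).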
| mit |
XTAv2/Enigma2 | lib/python/Components/TuneTest.py | 27 | 10721 | from enigma import eDVBFrontendParametersSatellite, eDVBFrontendParametersTerrestrial, eDVBFrontendParametersCable, eDVBFrontendParameters, eDVBResourceManager, eTimer
class Tuner:
def __init__(self, frontend, ignore_rotor=False):
self.frontend = frontend
self.ignore_rotor = ignore_rotor
# transponder = (frequency, symbolrate, polarisation, fec, inversion, orbpos, system, modulation, rolloff, pilot, tsid, onid)
# 0 1 2 3 4 5 6 7 8 9 10 11
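	# Worked example (all values invented): the tuple
	#   (11778, 27500, 1, 2, 2, 192, 0, 1, 0, 2, -1, -1)
	# tunes 11778 MHz at 27500 kS/s; the small integers are the raw
	# eDVBFrontendParametersSatellite enum values, and the trailing -1/-1
	# (tsid/onid) mark the transponder as unverifiable, so TuneTest skips it
	# when run(checkPIDs=True) is used.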
def tune(self, transponder):
if self.frontend:
print "[TuneTest] tuning to transponder with data", transponder
parm = eDVBFrontendParametersSatellite()
parm.frequency = transponder[0] * 1000
parm.symbol_rate = transponder[1] * 1000
parm.polarisation = transponder[2]
parm.fec = transponder[3]
parm.inversion = transponder[4]
parm.orbital_position = transponder[5]
parm.system = transponder[6]
parm.modulation = transponder[7]
parm.rolloff = transponder[8]
parm.pilot = transponder[9]
self.tuneSatObj(parm)
def tuneSatObj(self, transponderObj):
if self.frontend:
feparm = eDVBFrontendParameters()
feparm.setDVBS(transponderObj, self.ignore_rotor)
self.lastparm = feparm
self.frontend.tune(feparm)
	def tuneTerr(self, frequency,
			inversion=2, bandwidth=7000000, fechigh=6, feclow=6,
			modulation=2, transmission=2, guard=4,
			hierarchy=4, system=0, plpid=0):
if self.frontend:
print "[TuneTest] tuning to transponder with data", [frequency, inversion, bandwidth, fechigh, feclow, modulation, transmission, guard, hierarchy, system, plpid]
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = frequency
parm.inversion = inversion
parm.bandwidth = bandwidth
parm.code_rate_HP = fechigh
parm.code_rate_LP = feclow
parm.modulation = modulation
parm.transmission_mode = transmission
parm.guard_interval = guard
parm.hierarchy = hierarchy
parm.system = system
parm.plpid = plpid
self.tuneTerrObj(parm)
def tuneTerrObj(self, transponderObj):
if self.frontend:
feparm = eDVBFrontendParameters()
feparm.setDVBT(transponderObj)
self.lastparm = feparm
self.frontend.tune(feparm)
def tuneCab(self, transponder):
if self.frontend:
print "[TuneTest] tuning to transponder with data", transponder
parm = eDVBFrontendParametersCable()
parm.frequency = transponder[0]
parm.symbol_rate = transponder[1]
parm.modulation = transponder[2]
parm.fec_inner = transponder[3]
parm.inversion = transponder[4]
#parm.system = transponder[5]
self.tuneCabObj(parm)
def tuneCabObj(self, transponderObj):
if self.frontend:
feparm = eDVBFrontendParameters()
feparm.setDVBC(transponderObj)
self.lastparm = feparm
self.frontend.tune(feparm)
def retune(self):
if self.frontend:
self.frontend.tune(self.lastparm)
def getTransponderData(self):
ret = { }
if self.frontend:
self.frontend.getTransponderData(ret, True)
return ret
# tunes a list of transponders and checks if they lock, optionally checking the onid/tsid combination
# 1) add transponders with addTransponder()
# 2) call run(<checkPIDs = True>)
# 3) finishedChecking() is called when the run is finished
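# Rough usage sketch (feid and transponder values are made up):
#     test = TuneTest(feid=0, stopOnSuccess=1)
#     test.addTransponder((11778, 27500, 1, 2, 2, 192, 0, 1, 0, 2, -1, -1))
#     test.run(checkPIDs=False)
# updateStatus() then polls the tuner every 100ms until finishedChecking()
# fires; override finishedChecking() to inspect successfullyTune/failedTune.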
class TuneTest:
def __init__(self, feid, stopOnSuccess = -1, stopOnError = -1):
self.stopOnSuccess = stopOnSuccess
self.stopOnError = stopOnError
self.feid = feid
self.transponderlist = []
self.currTuned = None
print "TuneTest for feid %d" % self.feid
if not self.openFrontend():
self.oldref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.session.nav.stopService() # try to disable foreground service
if not self.openFrontend():
if self.session.pipshown: # try to disable pip
if hasattr(self.session, 'infobar'):
if self.session.infobar.servicelist.dopipzap:
self.session.infobar.servicelist.togglePipzap()
if hasattr(self.session, 'pip'):
del self.session.pip
self.session.pipshown = False
if not self.openFrontend():
self.frontend = None # in normal case this should not happen
self.tuner = Tuner(self.frontend)
self.timer = eTimer()
self.timer.callback.append(self.updateStatus)
def gotTsidOnid(self, tsid, onid):
print "******** got tsid, onid:", tsid, onid
		if tsid != -1 and onid != -1:
self.pidStatus = self.INTERNAL_PID_STATUS_SUCCESSFUL
self.tsid = tsid
self.onid = onid
else:
self.pidStatus = self.INTERNAL_PID_STATUS_FAILED
self.tsid = -1
self.onid = -1
self.timer.start(100, True)
def updateStatus(self):
dict = {}
self.frontend.getFrontendStatus(dict)
stop = False
print "status:", dict
if dict["tuner_state"] == "TUNING":
print "TUNING"
self.timer.start(100, True)
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_TUNING, self.currTuned))
elif self.checkPIDs and self.pidStatus == self.INTERNAL_PID_STATUS_NOOP:
print "2nd choice"
if dict["tuner_state"] == "LOCKED":
print "acquiring TSID/ONID"
self.raw_channel.receivedTsidOnid.get().append(self.gotTsidOnid)
self.raw_channel.requestTsidOnid()
self.pidStatus = self.INTERNAL_PID_STATUS_WAITING
else:
self.pidStatus = self.INTERNAL_PID_STATUS_FAILED
elif self.checkPIDs and self.pidStatus == self.INTERNAL_PID_STATUS_WAITING:
print "waiting for pids"
else:
if dict["tuner_state"] == "LOSTLOCK" or dict["tuner_state"] == "FAILED":
self.tuningtransponder = self.nextTransponder()
self.failedTune.append([self.currTuned, self.oldTuned, "tune_failed", dict]) # last parameter is the frontend status)
if self.stopOnError != -1 and self.stopOnError <= len(self.failedTune):
stop = True
elif dict["tuner_state"] == "LOCKED":
pidsFailed = False
if self.checkPIDs:
if self.currTuned is not None:
if self.tsid != self.currTuned[10] or self.onid != self.currTuned[11]:
self.failedTune.append([self.currTuned, self.oldTuned, "pids_failed", {"real": (self.tsid, self.onid), "expected": (self.currTuned[10], self.currTuned[11])}, dict]) # last parameter is the frontend status
pidsFailed = True
else:
self.successfullyTune.append([self.currTuned, self.oldTuned, dict]) # 3rd parameter is the frontend status
if self.stopOnSuccess != -1 and self.stopOnSuccess <= len(self.successfullyTune):
stop = True
				elif not self.checkPIDs or (self.checkPIDs and not pidsFailed):
self.successfullyTune.append([self.currTuned, self.oldTuned, dict]) # 3rd parameter is the frontend status
if self.stopOnSuccess != -1 and self.stopOnSuccess <= len(self.successfullyTune):
stop = True
self.tuningtransponder = self.nextTransponder()
else:
print "************* tuner_state:", dict["tuner_state"]
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_NOOP, self.currTuned))
if not stop:
self.tune()
if self.tuningtransponder < len(self.transponderlist) and not stop:
if self.pidStatus != self.INTERNAL_PID_STATUS_WAITING:
self.timer.start(100, True)
print "restart timer"
else:
print "not restarting timers (waiting for pids)"
else:
self.progressCallback((self.getProgressLength(), len(self.transponderlist), self.STATUS_DONE, self.currTuned))
print "finishedChecking"
self.finishedChecking()
def firstTransponder(self):
print "firstTransponder:"
index = 0
if self.checkPIDs:
print "checkPIDs-loop"
# check for tsid != -1 and onid != -1
print "index:", index
print "len(self.transponderlist):", len(self.transponderlist)
while (index < len(self.transponderlist) and (self.transponderlist[index][10] == -1 or self.transponderlist[index][11] == -1)):
index += 1
print "FirstTransponder final index:", index
return index
def nextTransponder(self):
print "getting next transponder", self.tuningtransponder
index = self.tuningtransponder + 1
if self.checkPIDs:
print "checkPIDs-loop"
# check for tsid != -1 and onid != -1
print "index:", index
print "len(self.transponderlist):", len(self.transponderlist)
while (index < len(self.transponderlist) and (self.transponderlist[index][10] == -1 or self.transponderlist[index][11] == -1)):
index += 1
print "next transponder index:", index
return index
def finishedChecking(self):
print "finished testing"
print "successfull:", self.successfullyTune
print "failed:", self.failedTune
def openFrontend(self):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
self.raw_channel = res_mgr.allocateRawChannel(self.feid)
if self.raw_channel:
self.frontend = self.raw_channel.getFrontend()
if self.frontend:
return True
else:
print "getFrontend failed"
else:
print "getRawChannel failed"
else:
print "getResourceManager instance failed"
return False
def tune(self):
print "tuning to", self.tuningtransponder
if self.tuningtransponder < len(self.transponderlist):
self.pidStatus = self.INTERNAL_PID_STATUS_NOOP
self.oldTuned = self.currTuned
self.currTuned = self.transponderlist[self.tuningtransponder]
self.tuner.tune(self.transponderlist[self.tuningtransponder])
INTERNAL_PID_STATUS_NOOP = 0
INTERNAL_PID_STATUS_WAITING = 1
INTERNAL_PID_STATUS_SUCCESSFUL = 2
INTERNAL_PID_STATUS_FAILED = 3
def run(self, checkPIDs = False):
self.checkPIDs = checkPIDs
self.pidStatus = self.INTERNAL_PID_STATUS_NOOP
self.failedTune = []
self.successfullyTune = []
self.tuningtransponder = self.firstTransponder()
self.tune()
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_START, self.currTuned))
self.timer.start(100, True)
# transponder = (frequency, symbolrate, polarisation, fec, inversion, orbpos, <system>, <modulation>, <rolloff>, <pilot>, <tsid>, <onid>)
# 0 1 2 3 4 5 6 7 8 9 10 11
def addTransponder(self, transponder):
self.transponderlist.append(transponder)
def clearTransponder(self):
self.transponderlist = []
def getProgressLength(self):
count = 0
if self.stopOnError == -1:
count = len(self.transponderlist)
else:
if count < self.stopOnError:
count = self.stopOnError
if self.stopOnSuccess == -1:
count = len(self.transponderlist)
else:
if count < self.stopOnSuccess:
count = self.stopOnSuccess
return count
STATUS_START = 0
STATUS_TUNING = 1
STATUS_DONE = 2
STATUS_NOOP = 3
# can be overwritten
# progress = (range, value, status, transponder)
def progressCallback(self, progress):
pass | gpl-2.0 |
taiwanlennon/flask-master | flask/sessions.py | 6 | 14527 | # -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from base64 import b64encode, b64decode
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
return td.days * 60 * 60 * 24 + td.seconds
class SessionMixin(object):
"""Expands a basic dictionary with an accessors that are expected
by Flask extensions and users for the session.
"""
def _get_permanent(self):
return self.get('_permanent', False)
def _set_permanent(self, value):
self['_permanent'] = bool(value)
#: this reflects the ``'_permanent'`` key in the dict.
permanent = property(_get_permanent, _set_permanent)
del _get_permanent, _set_permanent
#: some session backends can tell you if a session is new, but that is
#: not necessarily guaranteed. Use with caution. The default mixin
#: implementation just hardcodes `False` in.
new = False
#: for some backends this will always be `True`, but some backends will
#: default this to false and detect changes in the dictionary for as
#: long as changes do not happen on mutable structures in the session.
#: The default mixin implementation just hardcodes `True` in.
modified = True
class TaggedJSONSerializer(object):
"""A customized JSON serializer that supports a few extra types that
we take for granted when serializing (tuples, markup objects, datetime).
"""
def dumps(self, value):
def _tag(value):
if isinstance(value, tuple):
return {' t': [_tag(x) for x in value]}
elif isinstance(value, uuid.UUID):
return {' u': value.hex}
elif isinstance(value, bytes):
return {' b': b64encode(value).decode('ascii')}
elif callable(getattr(value, '__html__', None)):
return {' m': text_type(value.__html__())}
elif isinstance(value, list):
return [_tag(x) for x in value]
elif isinstance(value, datetime):
return {' d': http_date(value)}
elif isinstance(value, dict):
return dict((k, _tag(v)) for k, v in iteritems(value))
elif isinstance(value, str):
try:
return text_type(value)
except UnicodeError:
raise UnexpectedUnicodeError(u'A byte string with '
u'non-ASCII data was passed to the session system '
u'which can only store unicode strings. Consider '
u'base64 encoding your string (String was %r)' % value)
return value
return json.dumps(_tag(value), separators=(',', ':'))
def loads(self, value):
def object_hook(obj):
if len(obj) != 1:
return obj
the_key, the_value = next(iteritems(obj))
if the_key == ' t':
return tuple(the_value)
elif the_key == ' u':
return uuid.UUID(the_value)
elif the_key == ' b':
return b64decode(the_value)
elif the_key == ' m':
return Markup(the_value)
elif the_key == ' d':
return parse_date(the_value)
return obj
return json.loads(value, object_hook=object_hook)
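# Round-trip sketch (illustrative): types that plain JSON would flatten
# survive the tagged encoding, e.g.
#     s = TaggedJSONSerializer()
#     s.loads(s.dumps({'pair': (1, 2), 'id': uuid.uuid4()}))
# returns the tuple and the UUID as their original types, not lists/strings.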
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
"""Baseclass for sessions based on signed cookies."""
def __init__(self, initial=None):
def on_update(self):
self.modified = True
CallbackDict.__init__(self, initial, on_update)
self.modified = False
class NullSession(SecureCookieSession):
"""Class used to generate nicer error messages if sessions are not
available. Will still allow read-only access to the empty session
but fail on setting.
"""
def _fail(self, *args, **kwargs):
raise RuntimeError('the session is unavailable because no secret '
'key was set. Set the secret_key on the '
'application to something unique and secret.')
__setitem__ = __delitem__ = clear = pop = popitem = \
update = setdefault = _fail
del _fail
class SessionInterface(object):
"""The basic interface you have to implement in order to replace the
default session interface which uses werkzeug's securecookie
implementation. The only methods you have to implement are
:meth:`open_session` and :meth:`save_session`, the others have
useful defaults which you don't need to change.
The session object returned by the :meth:`open_session` method has to
provide a dictionary like interface plus the properties and methods
from the :class:`SessionMixin`. We recommend just subclassing a dict
and adding that mixin::
class Session(dict, SessionMixin):
pass
If :meth:`open_session` returns `None` Flask will call into
:meth:`make_null_session` to create a session that acts as replacement
if the session support cannot work because some requirement is not
fulfilled. The default :class:`NullSession` class that is created
will complain that the secret key was not set.
To replace the session interface on an application all you have to do
is to assign :attr:`flask.Flask.session_interface`::
app = Flask(__name__)
app.session_interface = MySessionInterface()
.. versionadded:: 0.8
"""
#: :meth:`make_null_session` will look here for the class that should
#: be created when a null session is requested. Likewise the
#: :meth:`is_null_session` method will perform a typecheck against
#: this type.
null_session_class = NullSession
#: A flag that indicates if the session interface is pickle based.
#: This can be used by flask extensions to make a decision in regards
#: to how to deal with the session object.
#:
#: .. versionadded:: 0.10
pickle_based = False
def make_null_session(self, app):
"""Creates a null session which acts as a replacement object if the
real session support could not be loaded due to a configuration
error. This mainly aids the user experience because the job of the
null session is to still support lookup without complaining but
modifications are answered with a helpful error message of what
failed.
This creates an instance of :attr:`null_session_class` by default.
"""
return self.null_session_class()
def is_null_session(self, obj):
"""Checks if a given object is a null session. Null sessions are
not asked to be saved.
This checks if the object is an instance of :attr:`null_session_class`
by default.
"""
return isinstance(obj, self.null_session_class)
def get_cookie_domain(self, app):
"""Helpful helper method that returns the cookie domain that should
be used for the session cookie if session cookies are used.
"""
if app.config['SESSION_COOKIE_DOMAIN'] is not None:
return app.config['SESSION_COOKIE_DOMAIN']
if app.config['SERVER_NAME'] is not None:
            # chop off the port which is usually not supported by browsers
rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
# Google chrome does not like cookies set to .localhost, so
# we just go with no domain then. Flask documents anyways that
# cross domain cookies need a fully qualified domain name
if rv == '.localhost':
rv = None
# If we infer the cookie domain from the server name we need
# to check if we are in a subpath. In that case we can't
# set a cross domain cookie.
if rv is not None:
path = self.get_cookie_path(app)
if path != '/':
rv = rv.lstrip('.')
return rv
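    # Example of the inference above (hypothetical config): with
    # SERVER_NAME = 'www.example.com:8080' and no SESSION_COOKIE_DOMAIN set,
    # get_cookie_domain() returns '.www.example.com'.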
def get_cookie_path(self, app):
"""Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
uses ``/`` if it's `None`.
"""
return app.config['SESSION_COOKIE_PATH'] or \
app.config['APPLICATION_ROOT'] or '/'
def get_cookie_httponly(self, app):
"""Returns True if the session cookie should be httponly. This
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
config var.
"""
return app.config['SESSION_COOKIE_HTTPONLY']
def get_cookie_secure(self, app):
"""Returns True if the cookie should be secure. This currently
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
"""
return app.config['SESSION_COOKIE_SECURE']
def get_expiration_time(self, app, session):
"""A helper method that returns an expiration date for the session
or `None` if the session is linked to the browser session. The
default implementation returns now + the permanent session
lifetime configured on the application.
"""
if session.permanent:
return datetime.utcnow() + app.permanent_session_lifetime
def should_set_cookie(self, app, session):
"""Indicates weather a cookie should be set now or not. This is
used by session backends to figure out if they should emit a
set-cookie header or not. The default behavior is controlled by
the ``SESSION_REFRESH_EACH_REQUEST`` config variable. If
it's set to `False` then a cookie is only set if the session is
        modified; if set to `True`, it's always set if the session is
permanent.
This check is usually skipped if sessions get deleted.
.. versionadded:: 1.0
"""
if session.modified:
return True
save_each = app.config['SESSION_REFRESH_EACH_REQUEST']
return save_each and session.permanent
def open_session(self, app, request):
"""This method has to be implemented and must either return `None`
in case the loading failed because of a configuration error or an
instance of a session object which implements a dictionary like
interface + the methods and attributes on :class:`SessionMixin`.
"""
raise NotImplementedError()
def save_session(self, app, session, response):
"""This is called for actual sessions returned by :meth:`open_session`
at the end of the request. This is still called during a request
context so if you absolutely need access to the request you can do
that.
"""
raise NotImplementedError()
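# Illustrative sketch (not part of Flask): a naive in-memory server-side
# interface could look like this - a real one would reuse the existing
# session id and handle expiry:
#
#     class DictSessionInterface(SessionInterface):
#         store = {}
#
#         def open_session(self, app, request):
#             sid = request.cookies.get(app.session_cookie_name)
#             return SecureCookieSession(self.store.get(sid, {}))
#
#         def save_session(self, app, session, response):
#             sid = uuid.uuid4().hex
#             self.store[sid] = dict(session)
#             response.set_cookie(app.session_cookie_name, sid)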
class SecureCookieSessionInterface(SessionInterface):
"""The default session interface that stores sessions in signed cookies
through the :mod:`itsdangerous` module.
"""
#: the salt that should be applied on top of the secret key for the
#: signing of cookie based sessions.
salt = 'cookie-session'
#: the hash function to use for the signature. The default is sha1
digest_method = staticmethod(hashlib.sha1)
#: the name of the itsdangerous supported key derivation. The default
#: is hmac.
key_derivation = 'hmac'
#: A python serializer for the payload. The default is a compact
#: JSON derived serializer with support for some extra Python types
#: such as datetime objects or tuples.
serializer = session_json_serializer
session_class = SecureCookieSession
def get_signing_serializer(self, app):
if not app.secret_key:
return None
signer_kwargs = dict(
key_derivation=self.key_derivation,
digest_method=self.digest_method
)
return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
serializer=self.serializer,
signer_kwargs=signer_kwargs)
def open_session(self, app, request):
s = self.get_signing_serializer(app)
if s is None:
return None
val = request.cookies.get(app.session_cookie_name)
if not val:
return self.session_class()
max_age = total_seconds(app.permanent_session_lifetime)
try:
data = s.loads(val, max_age=max_age)
return self.session_class(data)
except BadSignature:
return self.session_class()
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
path = self.get_cookie_path(app)
# Delete case. If there is no session we bail early.
# If the session was modified to be empty we remove the
# whole cookie.
if not session:
if session.modified:
response.delete_cookie(app.session_cookie_name,
domain=domain, path=path)
return
# Modification case. There are upsides and downsides to
# emitting a set-cookie header each request. The behavior
# is controlled by the :meth:`should_set_cookie` method
# which performs a quick check to figure out if the cookie
# should be set or not. This is controlled by the
# SESSION_REFRESH_EACH_REQUEST config flag as well as
# the permanent flag on the session itself.
if not self.should_set_cookie(app, session):
return
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
val = self.get_signing_serializer(app).dumps(dict(session))
response.set_cookie(app.session_cookie_name, val,
expires=expires, httponly=httponly,
domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| bsd-3-clause |
IsCoolEntertainment/debpkg_python-boto | boto/cloudsearch/sourceattribute.py | 37 | 3157 | # Copyright (c) 202 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class SourceAttribute(object):
"""
Provide information about attributes for an index field.
A maximum of 20 source attributes can be configured for
each index field.
:ivar default: Optional default value if the source attribute
is not specified in a document.
:ivar name: The name of the document source field to add
to this ``IndexField``.
:ivar data_function: Identifies the transformation to apply
when copying data from a source attribute.
:ivar data_map: The value is a dict with the following keys:
* cases - A dict that translates source field values
to custom values.
* default - An optional default value to use if the
source attribute is not specified in a document.
* name - the name of the document source field to add
to this ``IndexField``
:ivar data_trim_title: Trims common title words from a source
document attribute when populating an ``IndexField``.
This can be used to create an ``IndexField`` you can
use for sorting. The value is a dict with the following
fields:
* default - An optional default value.
* language - an IETF RFC 4646 language code.
* separator - The separator that follows the text to trim.
* name - The name of the document source field to add.
"""
ValidDataFunctions = ('Copy', 'TrimTitle', 'Map')
def __init__(self):
self.data_copy = {}
self._data_function = self.ValidDataFunctions[0]
self.data_map = {}
self.data_trim_title = {}
@property
def data_function(self):
return self._data_function
@data_function.setter
def data_function(self, value):
if value not in self.ValidDataFunctions:
valid = '|'.join(self.ValidDataFunctions)
raise ValueError('data_function must be one of: %s' % valid)
self._data_function = value
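# Illustrative usage (field names are invented):
#     sa = SourceAttribute()
#     sa.data_function = 'Map'
#     sa.data_map = {'name': 'genre',
#                    'cases': {'sf': 'Science Fiction'},
#                    'default': 'Other'}
# Assigning a data_function outside ValidDataFunctions raises ValueError.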
| mit |
amisrs/one-eighty | angular_flask/lib/python2.7/site-packages/requests/packages/charade/langcyrillicmodel.py | 184 | 18054 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
Koi8rModel = {
'charToOrderMap': KOI8R_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "KOI8-R"
}
Win1251CyrillicModel = {
'charToOrderMap': win1251_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
Latin5CyrillicModel = {
'charToOrderMap': latin5_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
MacCyrillicModel = {
'charToOrderMap': macCyrillic_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "MacCyrillic"
}
Ibm866Model = {
'charToOrderMap': IBM866_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM866"
}
Ibm855Model = {
'charToOrderMap': IBM855_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM855"
}
# flake8: noqa
| mit |
joostvdg/jenkins-job-builder | tests/cmd/test_cmd.py | 1 | 1383 | import os
import testtools
from jenkins_jobs.cli import entry
from tests.base import LoggingFixture
from tests.base import mock
class CmdTestsBase(LoggingFixture, testtools.TestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
def setUp(self):
super(CmdTestsBase, self).setUp()
# Testing the cmd module can sometimes result in the CacheStorage class
# attempting to create the cache directory multiple times as the tests
# are run in parallel. Stub out the CacheStorage to ensure that each
# test can safely create the cache directory without risk of
# interference.
cache_patch = mock.patch('jenkins_jobs.builder.CacheStorage',
autospec=True)
self.cache_mock = cache_patch.start()
self.addCleanup(cache_patch.stop)
self.default_config_file = os.path.join(self.fixtures_path,
'empty_builder.ini')
def execute_jenkins_jobs_with_args(self, args):
jenkins_jobs = entry.JenkinsJobs(args)
jenkins_jobs.execute()
class TestCmd(CmdTestsBase):
def test_with_empty_args(self):
"""
User passes no args, should fail with SystemExit
"""
with mock.patch('sys.stderr'):
self.assertRaises(SystemExit, entry.JenkinsJobs, [])
| apache-2.0 |
Javier-Acosta/meran | dev-plugins/node64/lib/node/wafadmin/Tools/cc.py | 4 | 4728 | #!/usr/bin/env python
# Meran - MERAN UNLP is an ILS (Integrated Library System) which provides Catalog,
# Circulation and User's Management. It's written in Perl, and uses Apache2
# Web-Server, MySQL database and Sphinx 2 indexing.
# Copyright (C) 2009-2013 Grupo de desarrollo de Meran CeSPI-UNLP
#
# This file is part of Meran.
#
# Meran is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Meran is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Meran. If not, see <http://www.gnu.org/licenses/>.
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"Base for c programs/libraries"
import os
import TaskGen, Build, Utils, Task
from Logs import debug
import ccroot
from TaskGen import feature, before, extension, after
g_cc_flag_vars = [
'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
EXT_CC = ['.c']
g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
# TODO remove in waf 1.6
class cc_taskgen(ccroot.ccroot_abstract):
pass
@feature('cc')
@before('apply_type_vars')
@after('default_cc')
def init_cc(self):
self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
if not self.env['CC_NAME']:
raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
@feature('cc')
@after('apply_incpaths')
def apply_obj_vars_cc(self):
"""after apply_incpaths for INC_PATHS"""
env = self.env
app = env.append_unique
cpppath_st = env['CPPPATH_ST']
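	# CPPPATH_ST is a printf-style template (typically '-I%s' for gcc-like
	# compilers - an assumption), so each path below becomes one include flag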
# local flags come first
# set the user-defined includes paths
for i in env['INC_PATHS']:
app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
# set the library include paths
for i in env['CPPPATH']:
app('_CCINCFLAGS', cpppath_st % i)
@feature('cc')
@after('apply_lib_vars')
def apply_defines_cc(self):
"""after uselib is set for CCDEFINES"""
self.defines = getattr(self, 'defines', [])
lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
milst = []
# now process the local defines
for defi in lst:
if not defi in milst:
milst.append(defi)
# CCDEFINES_
libs = self.to_list(self.uselib)
for l in libs:
val = self.env['CCDEFINES_'+l]
if val: milst += val
self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
y = self.env['CCDEFINES_ST']
self.env['_CCDEFFLAGS'] = [y%x for x in milst]
@extension(EXT_CC)
def c_hook(self, node):
# create the compilation task: cpp or cc
if getattr(self, 'obj_ext', None):
obj_ext = self.obj_ext
else:
obj_ext = '_%d.o' % self.idx
task = self.create_task('cc', node, node.change_ext(obj_ext))
try:
self.compiled_tasks.append(task)
except AttributeError:
raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
return task
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
cls.scan = ccroot.scan
cls.vars.append('CCDEPS')
link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1
cls.install = Utils.nada | gpl-3.0 |
jtimberman/omnibus | source/otp_src_R14B02/lib/asn1/test/asn1_SUITE_data/Enum.py | 97 | 1047 | Enum DEFINITIONS IMPLICIT TAGS ::=
BEGIN
-- EXPORTS P1, P2;
-- F.2.3.1
-- Use an enumerated type to model the values of a variable
-- with three or more states.
-- Assign values starting with zero if their only
-- constraint is distinctness.
-- EXAMPLE
DayOfTheWeek ::= ENUMERATED {sunday(0), monday(1), tuesday(2),
wednesday(3), thursday(4), friday(5), saturday(6)}
firstDay DayOfTheWeek ::= sunday
-- F.2.3.2
-- Use an enumerated type to model the values of a variable that
-- has just two states now,
-- but that may have additional states in a future version of the protocol.
-- EXAMPLE
MaritalStatus ::= ENUMERATED {single(0), married(1)}
-- in anticipation of
MaritalStatus2 ::= ENUMERATED {single(0), married(1), widowed(2)}
E1 ::= ENUMERATED {blue,green,yellow}
E2 ::= ENUMERATED {monday(0),thuesday(1),wednesday(2),thursday(3),friday(4)}
E3 ::= ENUMERATED {monday,thuesday(0)}
S ::= SEQUENCE {
e1 ENUMERATED {hej,hopp},
e2 [2] EXPLICIT ENUMERATED {san,sa}
}
enumVal E3 ::= monday
--enumWrongVal E3 ::= sunday
END
| apache-2.0 |
upliftaero/MissionPlanner | Lib/site-packages/numpy/ma/tests/test_core.py | 53 | 133084 | # pylint: disable-msg=W0401,W0511,W0611,W0612,W0614,R0201,E1102
"""Tests suite for MaskedArray & subclassing.
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
"""
__author__ = "Pierre GF Gerard-Marchant"
import types
import warnings
import numpy as np
import numpy.core.fromnumeric as fromnumeric
from numpy import ndarray
from numpy.ma.testutils import *
import numpy.ma.core
from numpy.ma.core import *
from numpy.compat import asbytes, asbytes_nested
pi = np.pi
import sys
if sys.version_info[0] >= 3:
from functools import reduce
if sys.platform == 'cli':
def arand(shape):
import random
result = np.empty(shape, 'd')
result.flat = [ random.random() for i in range(result.size) ]
return result
    def uniform(low=0.0, high=1.0, size=1):
import random
result = np.empty(size, 'd')
d = high-low
result.flat = [ random.random()*d+low for i in range(result.size) ]
return result
else:
arand = np.random.rand
uniform = np.random.uniform
#..............................................................................
class TestMaskedArray(TestCase):
"Base test class for MaskedArrays."
def setUp (self):
"Base data definition."
x = np.array([1., 1., 1., -2., pi / 2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0 , 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = masked_array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
xm.set_fill_value(1e+20)
self.d = (x, y, a10, m1, m2, xm, ym, z, zm, xf)
def test_basicattributes(self):
"Tests some basic array attributes."
a = array([1, 3, 2])
b = array([1, 3, 2], mask=[1, 0, 1])
assert_equal(a.ndim, 1)
assert_equal(b.ndim, 1)
assert_equal(a.size, 3)
assert_equal(b.size, 3)
assert_equal(a.shape, (3,))
assert_equal(b.shape, (3,))
def test_basic0d(self):
"Checks masking a scalar"
x = masked_array(0)
assert_equal(str(x), '0')
x = masked_array(0, mask=True)
assert_equal(str(x), str(masked_print_option))
x = masked_array(0, mask=False)
assert_equal(str(x), '0')
x = array(0, mask=1)
self.assertTrue(x.filled().dtype is x._data.dtype)
def test_basic1d(self):
"Test of basic array creation and properties in 1 dimension."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
self.assertTrue(not isMaskedArray(x))
self.assertTrue(isMaskedArray(xm))
self.assertTrue((xm - ym).filled(0).any())
fail_if_equal(xm.mask.astype(int), ym.mask.astype(int))
s = x.shape
assert_equal(np.shape(xm), s)
assert_equal(xm.shape, s)
assert_equal(xm.dtype, x.dtype)
assert_equal(zm.dtype, z.dtype)
assert_equal(xm.size , reduce(lambda x, y:x * y, s))
assert_equal(count(xm) , len(m1) - reduce(lambda x, y:x + y, m1))
assert_array_equal(xm, xf)
assert_array_equal(filled(xm, 1.e20), xf)
assert_array_equal(x, xm)
def test_basic2d(self):
"Test of basic array creation and properties in 2 dimensions."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
for s in [(4, 3), (6, 2)]:
x.shape = s
y.shape = s
xm.shape = s
ym.shape = s
xf.shape = s
#
self.assertTrue(not isMaskedArray(x))
self.assertTrue(isMaskedArray(xm))
assert_equal(shape(xm), s)
assert_equal(xm.shape, s)
assert_equal(xm.size , reduce(lambda x, y:x * y, s))
assert_equal(count(xm) , len(m1) - reduce(lambda x, y:x + y, m1))
assert_equal(xm, xf)
assert_equal(filled(xm, 1.e20), xf)
assert_equal(x, xm)
def test_concatenate_basic(self):
"Tests concatenations."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
# basic concatenation
assert_equal(np.concatenate((x, y)), concatenate((xm, ym)))
assert_equal(np.concatenate((x, y)), concatenate((x, y)))
assert_equal(np.concatenate((x, y)), concatenate((xm, y)))
assert_equal(np.concatenate((x, y, x)), concatenate((x, ym, x)))
def test_concatenate_alongaxis(self):
"Tests concatenations."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
# Concatenation along an axis
s = (3, 4)
x.shape = y.shape = xm.shape = ym.shape = s
assert_equal(xm.mask, np.reshape(m1, s))
assert_equal(ym.mask, np.reshape(m2, s))
xmym = concatenate((xm, ym), 1)
assert_equal(np.concatenate((x, y), 1), xmym)
assert_equal(np.concatenate((xm.mask, ym.mask), 1), xmym._mask)
#
x = zeros(2)
y = array(ones(2), mask=[False, True])
z = concatenate((x, y))
assert_array_equal(z, [0, 0, 1, 1])
assert_array_equal(z.mask, [False, False, False, True])
z = concatenate((y, x))
assert_array_equal(z, [1, 1, 0, 0])
assert_array_equal(z.mask, [False, True, False, False])
def test_concatenate_flexible(self):
"Tests the concatenation on flexible arrays."
data = masked_array(zip(arand(10),
np.arange(10)),
dtype=[('a', float), ('b', int)])
#
test = concatenate([data[:5], data[5:]])
assert_equal_records(test, data)
def test_creation_ndmin(self):
"Check the use of ndmin"
x = array([1, 2, 3], mask=[1, 0, 0], ndmin=2)
assert_equal(x.shape, (1, 3))
assert_equal(x._data, [[1, 2, 3]])
assert_equal(x._mask, [[1, 0, 0]])
def test_creation_ndmin_from_maskedarray(self):
"Make sure we're not losing the original mask w/ ndmin"
x = array([1, 2, 3])
x[-1] = masked
xx = array(x, ndmin=2, dtype=float)
assert_equal(x.shape, x._mask.shape)
assert_equal(xx.shape, xx._mask.shape)
def test_creation_maskcreation(self):
"Tests how masks are initialized at the creation of Maskedarrays."
data = arange(24, dtype=float)
data[[3, 6, 15]] = masked
dma_1 = MaskedArray(data)
assert_equal(dma_1.mask, data.mask)
dma_2 = MaskedArray(dma_1)
assert_equal(dma_2.mask, dma_1.mask)
dma_3 = MaskedArray(dma_1, mask=[1, 0, 0, 0] * 6)
fail_if_equal(dma_3.mask, dma_1.mask)
def test_creation_with_list_of_maskedarrays(self):
"Tests creaating a masked array from alist of masked arrays."
x = array(np.arange(5), mask=[1, 0, 0, 0, 0])
data = array((x, x[::-1]))
assert_equal(data, [[0, 1, 2, 3, 4], [4, 3, 2, 1, 0]])
assert_equal(data._mask, [[1, 0, 0, 0, 0], [0, 0, 0, 0, 1]])
#
x.mask = nomask
data = array((x, x[::-1]))
assert_equal(data, [[0, 1, 2, 3, 4], [4, 3, 2, 1, 0]])
self.assertTrue(data.mask is nomask)
def test_asarray(self):
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
xm.fill_value = -9999
xm._hardmask = True
xmm = asarray(xm)
assert_equal(xmm._data, xm._data)
assert_equal(xmm._mask, xm._mask)
assert_equal(xmm.fill_value, xm.fill_value)
assert_equal(xmm._hardmask, xm._hardmask)
def test_fix_invalid(self):
"Checks fix_invalid."
err_status_ini = np.geterr()
try:
np.seterr(invalid='ignore')
data = masked_array([np.nan, 0., 1.], mask=[0, 0, 1])
data_fixed = fix_invalid(data)
assert_equal(data_fixed._data, [data.fill_value, 0., 1.])
assert_equal(data_fixed._mask, [1., 0., 1.])
finally:
np.seterr(**err_status_ini)
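        # Note: fix_invalid masks every entry where np.isfinite is False
        # (NaN or +/-inf) and replaces its data with the fill value.
        # A minimal extra check along the same lines:
        extra = fix_invalid(masked_array([np.inf, 1.]))
        assert_equal(extra._mask, [1, 0])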
def test_maskedelement(self):
"Test of masked element"
x = arange(6)
x[1] = masked
self.assertTrue(str(masked) == '--')
self.assertTrue(x[1] is masked)
assert_equal(filled(x[1], 0), 0)
# don't know why these should raise an exception...
#self.assertRaises(Exception, lambda x,y: x+y, masked, masked)
#self.assertRaises(Exception, lambda x,y: x+y, masked, 2)
#self.assertRaises(Exception, lambda x,y: x+y, masked, xx)
#self.assertRaises(Exception, lambda x,y: x+y, xx, masked)
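        # Note: `masked` is a singleton, which is why identity checks
        # such as `x[1] is masked` are the canonical way to test for it.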
def test_set_element_as_object(self):
"""Tests setting elements with object"""
a = empty(1, dtype=object)
x = (1, 2, 3, 4, 5)
a[0] = x
assert_equal(a[0], x)
self.assertTrue(a[0] is x)
#
import datetime
dt = datetime.datetime.now()
a[0] = dt
self.assertTrue(a[0] is dt)
def test_indexing(self):
"Tests conversions and indexing"
x1 = np.array([1, 2, 4, 3])
x2 = array(x1, mask=[1, 0, 0, 0])
x3 = array(x1, mask=[0, 1, 0, 1])
x4 = array(x1)
# test conversion to strings
junk, garbage = str(x2), repr(x2)
assert_equal(np.sort(x1), sort(x2, endwith=False))
# tests of indexing
assert type(x2[1]) is type(x1[1])
assert x1[1] == x2[1]
assert x2[0] is masked
assert_equal(x1[2], x2[2])
assert_equal(x1[2:5], x2[2:5])
assert_equal(x1[:], x2[:])
assert_equal(x1[1:], x3[1:])
x1[2] = 9
x2[2] = 9
assert_equal(x1, x2)
x1[1:3] = 99
x2[1:3] = 99
assert_equal(x1, x2)
x2[1] = masked
assert_equal(x1, x2)
x2[1:3] = masked
assert_equal(x1, x2)
x2[:] = x1
x2[1] = masked
assert allequal(getmask(x2), array([0, 1, 0, 0]))
x3[:] = masked_array([1, 2, 3, 4], [0, 1, 1, 0])
assert allequal(getmask(x3), array([0, 1, 1, 0]))
x4[:] = masked_array([1, 2, 3, 4], [0, 1, 1, 0])
assert allequal(getmask(x4), array([0, 1, 1, 0]))
assert allequal(x4, array([1, 2, 3, 4]))
x1 = np.arange(5) * 1.0
x2 = masked_values(x1, 3.0)
assert_equal(x1, x2)
assert allequal(array([0, 0, 0, 1, 0], MaskType), x2.mask)
assert_equal(3.0, x2.fill_value)
x1 = array([1, 'hello', 2, 3], object)
x2 = np.array([1, 'hello', 2, 3], object)
s1 = x1[1]
s2 = x2[1]
assert_equal(type(s2), str)
assert_equal(type(s1), str)
assert_equal(s1, s2)
assert x1[1:1].shape == (0,)
def test_copy(self):
"Tests of some subtle points of copying and sizing."
n = [0, 0, 1, 0, 0]
m = make_mask(n)
m2 = make_mask(m)
self.assertTrue(m is m2)
m3 = make_mask(m, copy=1)
self.assertTrue(m is not m3)
warnings.simplefilter('ignore', DeprecationWarning)
x1 = np.arange(5)
y1 = array(x1, mask=m)
#self.assertTrue( y1._data is x1)
assert_equal(y1._data.__array_interface__, x1.__array_interface__)
self.assertTrue(allequal(x1, y1.raw_data()))
#self.assertTrue( y1.mask is m)
assert_equal(y1._mask.__array_interface__, m.__array_interface__)
warnings.resetwarnings()
y1a = array(y1)
#self.assertTrue( y1a.raw_data() is y1.raw_data())
self.assertTrue(y1a._data.__array_interface__ == y1._data.__array_interface__)
self.assertTrue(y1a.mask is y1.mask)
y2 = array(x1, mask=m)
#self.assertTrue( y2.raw_data() is x1)
self.assertTrue(y2._data.__array_interface__ == x1.__array_interface__)
#self.assertTrue( y2.mask is m)
self.assertTrue(y2._mask.__array_interface__ == m.__array_interface__)
self.assertTrue(y2[2] is masked)
y2[2] = 9
self.assertTrue(y2[2] is not masked)
#self.assertTrue( y2.mask is not m)
self.assertTrue(y2._mask.__array_interface__ != m.__array_interface__)
self.assertTrue(allequal(y2.mask, 0))
y3 = array(x1 * 1.0, mask=m)
self.assertTrue(filled(y3).dtype is (x1 * 1.0).dtype)
x4 = arange(4)
x4[2] = masked
y4 = resize(x4, (8,))
assert_equal(concatenate([x4, x4]), y4)
assert_equal(getmask(y4), [0, 0, 1, 0, 0, 0, 1, 0])
y5 = repeat(x4, (2, 2, 2, 2), axis=0)
assert_equal(y5, [0, 0, 1, 1, 2, 2, 3, 3])
y6 = repeat(x4, 2, axis=0)
assert_equal(y5, y6)
y7 = x4.repeat((2, 2, 2, 2), axis=0)
assert_equal(y5, y7)
y8 = x4.repeat(2, 0)
assert_equal(y5, y8)
y9 = x4.copy()
assert_equal(y9._data, x4._data)
assert_equal(y9._mask, x4._mask)
#
x = masked_array([1, 2, 3], mask=[0, 1, 0])
# Copy is False by default
y = masked_array(x)
assert_equal(y._data.ctypes.data, x._data.ctypes.data)
assert_equal(y._mask.ctypes.data, x._mask.ctypes.data)
y = masked_array(x, copy=True)
assert_not_equal(y._data.ctypes.data, x._data.ctypes.data)
assert_not_equal(y._mask.ctypes.data, x._mask.ctypes.data)
def test_deepcopy(self):
from copy import deepcopy
a = array([0, 1, 2], mask=[False, True, False])
copied = deepcopy(a)
assert_equal(copied.mask, a.mask)
assert_not_equal(id(a._mask), id(copied._mask))
#
copied[1] = 1
assert_equal(copied.mask, [0, 0, 0])
assert_equal(a.mask, [0, 1, 0])
#
copied = deepcopy(a)
assert_equal(copied.mask, a.mask)
copied.mask[1] = False
assert_equal(copied.mask, [0, 0, 0])
assert_equal(a.mask, [0, 1, 0])
def test_pickling(self):
"Tests pickling"
        try:
            import cPickle
        except ImportError:
            import pickle as cPickle
a = arange(10)
a[::3] = masked
a.fill_value = 999
a_pickled = cPickle.loads(a.dumps())
assert_equal(a_pickled._mask, a._mask)
assert_equal(a_pickled._data, a._data)
assert_equal(a_pickled.fill_value, 999)
def test_pickling_subbaseclass(self):
"Test pickling w/ a subclass of ndarray"
        try:
            import cPickle
        except ImportError:
            import pickle as cPickle
a = array(np.matrix(range(10)), mask=[1, 0, 1, 0, 0] * 2)
a_pickled = cPickle.loads(a.dumps())
assert_equal(a_pickled._mask, a._mask)
assert_equal(a_pickled, a)
self.assertTrue(isinstance(a_pickled._data, np.matrix))
def test_pickling_wstructured(self):
"Tests pickling w/ structured array"
        try:
            import cPickle
        except ImportError:
            import pickle as cPickle
a = array([(1, 1.), (2, 2.)], mask=[(0, 0), (0, 1)],
dtype=[('a', int), ('b', float)])
a_pickled = cPickle.loads(a.dumps())
assert_equal(a_pickled._mask, a._mask)
assert_equal(a_pickled, a)
def test_pickling_keepalignment(self):
"Tests pickling w/ F_CONTIGUOUS arrays"
        try:
            import cPickle
        except ImportError:
            import pickle as cPickle
a = arange(10)
a.shape = (-1, 2)
b = a.T
test = cPickle.loads(cPickle.dumps(b))
assert_equal(test, b)
# def test_pickling_oddity(self):
# "Test some pickling oddity"
# import cPickle
# a = array([{'a':1}, {'b':2}, 3], dtype=object)
# test = cPickle.loads(cPickle.dumps(a))
# assert_equal(test, a)
def test_single_element_subscript(self):
"Tests single element subscripts of Maskedarrays."
a = array([1, 3, 2])
b = array([1, 3, 2], mask=[1, 0, 1])
assert_equal(a[0].shape, ())
assert_equal(b[0].shape, ())
assert_equal(b[1].shape, ())
def test_topython(self):
"Tests some communication issues with Python."
assert_equal(1, int(array(1)))
assert_equal(1.0, float(array(1)))
assert_equal(1, int(array([[[1]]])))
assert_equal(1.0, float(array([[1]])))
self.assertRaises(TypeError, float, array([1, 1]))
#
warnings.simplefilter('ignore', UserWarning)
assert np.isnan(float(array([1], mask=[1])))
warnings.resetwarnings()
#
a = array([1, 2, 3], mask=[1, 0, 0])
self.assertRaises(TypeError, lambda:float(a))
assert_equal(float(a[-1]), 3.)
self.assertTrue(np.isnan(float(a[0])))
self.assertRaises(TypeError, int, a)
assert_equal(int(a[-1]), 3)
self.assertRaises(MAError, lambda:int(a[0]))
def test_oddfeatures_1(self):
"Test of other odd features"
x = arange(20)
x = x.reshape(4, 5)
x.flat[5] = 12
assert x[1, 0] == 12
z = x + 10j * x
assert_equal(z.real, x)
assert_equal(z.imag, 10 * x)
assert_equal((z * conjugate(z)).real, 101 * x * x)
z.imag[...] = 0.0
#
x = arange(10)
x[3] = masked
assert str(x[3]) == str(masked)
c = x >= 8
assert count(where(c, masked, masked)) == 0
assert shape(where(c, masked, masked)) == c.shape
#
z = masked_where(c, x)
assert z.dtype is x.dtype
assert z[3] is masked
assert z[4] is not masked
assert z[7] is not masked
assert z[8] is masked
assert z[9] is masked
assert_equal(x, z)
def test_oddfeatures_2(self):
"Tests some more features."
x = array([1., 2., 3., 4., 5.])
c = array([1, 1, 1, 0, 0])
x[2] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
c[0] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
assert z[0] is masked
assert z[1] is not masked
assert z[2] is masked
def test_oddfeatures_3(self):
"""Tests some generic features."""
atest = array([10], mask=True)
btest = array([20])
idx = atest.mask
atest[idx] = btest[idx]
assert_equal(atest, [20])
def test_filled_w_flexible_dtype(self):
"Test filled w/ flexible dtype"
flexi = array([(1, 1, 1)],
dtype=[('i', int), ('s', '|S8'), ('f', float)])
flexi[0] = masked
assert_equal(flexi.filled(),
np.array([(default_fill_value(0),
default_fill_value('0'),
default_fill_value(0.),)], dtype=flexi.dtype))
flexi[0] = masked
assert_equal(flexi.filled(1),
np.array([(1, '1', 1.)], dtype=flexi.dtype))
def test_filled_w_mvoid(self):
"Test filled w/ mvoid"
ndtype = [('a', int), ('b', float)]
a = mvoid((1, 2.), mask=[(0, 1)], dtype=ndtype)
# Filled using default
test = a.filled()
assert_equal(tuple(test), (1, default_fill_value(1.)))
# Explicit fill_value
test = a.filled((-1, -1))
assert_equal(tuple(test), (1, -1))
# Using predefined filling values
a.fill_value = (-999, -999)
assert_equal(tuple(a.filled()), (1, -999))
def test_filled_w_nested_dtype(self):
"Test filled w/ nested dtype"
ndtype = [('A', int), ('B', [('BA', int), ('BB', int)])]
a = array([(1, (1, 1)), (2, (2, 2))],
mask=[(0, (1, 0)), (0, (0, 1))], dtype=ndtype)
test = a.filled(0)
control = np.array([(1, (0, 1)), (2, (2, 0))], dtype=ndtype)
assert_equal(test, control)
#
test = a['B'].filled(0)
control = np.array([(0, 1), (2, 0)], dtype=a['B'].dtype)
assert_equal(test, control)
def test_optinfo_propagation(self):
"Checks that _optinfo dictionary isn't back-propagated"
x = array([1, 2, 3, ], dtype=float)
x._optinfo['info'] = '???'
y = x.copy()
assert_equal(y._optinfo['info'], '???')
y._optinfo['info'] = '!!!'
assert_equal(x._optinfo['info'], '???')
def test_fancy_printoptions(self):
"Test printing a masked array w/ fancy dtype."
fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
test = array([(1, (2, 3.0)), (4, (5, 6.0))],
mask=[(1, (0, 1)), (0, (1, 0))],
dtype=fancydtype)
control = "[(--, (2, --)) (4, (--, 6.0))]"
assert_equal(str(test), control)
def test_flatten_structured_array(self):
"Test flatten_structured_array on arrays"
# On ndarray
ndtype = [('a', int), ('b', float)]
a = np.array([(1, 1), (2, 2)], dtype=ndtype)
test = flatten_structured_array(a)
control = np.array([[1., 1.], [2., 2.]], dtype=np.float)
assert_equal(test, control)
assert_equal(test.dtype, control.dtype)
# On masked_array
a = array([(1, 1), (2, 2)], mask=[(0, 1), (1, 0)], dtype=ndtype)
test = flatten_structured_array(a)
control = array([[1., 1.], [2., 2.]],
mask=[[0, 1], [1, 0]], dtype=np.float)
assert_equal(test, control)
assert_equal(test.dtype, control.dtype)
assert_equal(test.mask, control.mask)
# On masked array with nested structure
ndtype = [('a', int), ('b', [('ba', int), ('bb', float)])]
a = array([(1, (1, 1.1)), (2, (2, 2.2))],
mask=[(0, (1, 0)), (1, (0, 1))], dtype=ndtype)
test = flatten_structured_array(a)
control = array([[1., 1., 1.1], [2., 2., 2.2]],
mask=[[0, 1, 0], [1, 0, 1]], dtype=np.float)
assert_equal(test, control)
assert_equal(test.dtype, control.dtype)
assert_equal(test.mask, control.mask)
# Keeping the initial shape
ndtype = [('a', int), ('b', float)]
a = np.array([[(1, 1), ], [(2, 2), ]], dtype=ndtype)
test = flatten_structured_array(a)
control = np.array([[[1., 1.], ], [[2., 2.], ]], dtype=np.float)
assert_equal(test, control)
assert_equal(test.dtype, control.dtype)
def test_void0d(self):
"Test creating a mvoid object"
ndtype = [('a', int), ('b', int)]
a = np.array([(1, 2,)], dtype=ndtype)[0]
f = mvoid(a)
assert(isinstance(f, mvoid))
#
a = masked_array([(1, 2)], mask=[(1, 0)], dtype=ndtype)[0]
assert(isinstance(a, mvoid))
#
a = masked_array([(1, 2), (1, 2)], mask=[(1, 0), (0, 0)], dtype=ndtype)
f = mvoid(a._data[0], a._mask[0])
assert(isinstance(f, mvoid))
def test_mvoid_getitem(self):
"Test mvoid.__getitem__"
ndtype = [('a', int), ('b', int)]
a = masked_array([(1, 2,), (3, 4)], mask=[(0, 0), (1, 0)], dtype=ndtype)
# w/o mask
f = a[0]
self.assertTrue(isinstance(f, np.void))
assert_equal((f[0], f['a']), (1, 1))
assert_equal(f['b'], 2)
# w/ mask
f = a[1]
self.assertTrue(isinstance(f, mvoid))
self.assertTrue(f[0] is masked)
self.assertTrue(f['a'] is masked)
assert_equal(f[1], 4)
def test_mvoid_iter(self):
"Test iteration on __getitem__"
ndtype = [('a', int), ('b', int)]
a = masked_array([(1, 2,), (3, 4)], mask=[(0, 0), (1, 0)], dtype=ndtype)
# w/o mask
assert_equal(list(a[0]), [1, 2])
# w/ mask
assert_equal(list(a[1]), [masked, 4])
def test_mvoid_print(self):
"Test printing a mvoid"
mx = array([(1, 1), (2, 2)], dtype=[('a', int), ('b', int)])
assert_equal(str(mx[0]), "(1, 1)")
mx['b'][0] = masked
ini_display = masked_print_option._display
masked_print_option.set_display("-X-")
try:
assert_equal(str(mx[0]), "(1, -X-)")
assert_equal(repr(mx[0]), "(1, -X-)")
finally:
masked_print_option.set_display(ini_display)
#------------------------------------------------------------------------------
class TestMaskedArrayArithmetic(TestCase):
"Base test class for MaskedArrays."
def setUp (self):
"Base data definition."
x = np.array([1., 1., 1., -2., pi / 2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0 , 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = masked_array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
xm.set_fill_value(1e+20)
self.d = (x, y, a10, m1, m2, xm, ym, z, zm, xf)
self.err_status = np.geterr()
np.seterr(divide='ignore', invalid='ignore')
def tearDown(self):
np.seterr(**self.err_status)
def test_basic_arithmetic (self):
"Test of basic arithmetic."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
a2d = array([[1, 2], [0, 4]])
a2dm = masked_array(a2d, [[0, 0], [1, 0]])
assert_equal(a2d * a2d, a2d * a2dm)
assert_equal(a2d + a2d, a2d + a2dm)
assert_equal(a2d - a2d, a2d - a2dm)
for s in [(12,), (4, 3), (2, 6)]:
x = x.reshape(s)
y = y.reshape(s)
xm = xm.reshape(s)
ym = ym.reshape(s)
xf = xf.reshape(s)
assert_equal(-x, -xm)
assert_equal(x + y, xm + ym)
assert_equal(x - y, xm - ym)
assert_equal(x * y, xm * ym)
assert_equal(x / y, xm / ym)
assert_equal(a10 + y, a10 + ym)
assert_equal(a10 - y, a10 - ym)
assert_equal(a10 * y, a10 * ym)
assert_equal(a10 / y, a10 / ym)
assert_equal(x + a10, xm + a10)
assert_equal(x - a10, xm - a10)
assert_equal(x * a10, xm * a10)
assert_equal(x / a10, xm / a10)
assert_equal(x ** 2, xm ** 2)
assert_equal(abs(x) ** 2.5, abs(xm) ** 2.5)
assert_equal(x ** y, xm ** ym)
assert_equal(np.add(x, y), add(xm, ym))
assert_equal(np.subtract(x, y), subtract(xm, ym))
assert_equal(np.multiply(x, y), multiply(xm, ym))
assert_equal(np.divide(x, y), divide(xm, ym))
def test_divide_on_different_shapes(self):
x = arange(6, dtype=float)
x.shape = (2, 3)
y = arange(3, dtype=float)
#
z = x / y
assert_equal(z, [[-1., 1., 1.], [-1., 4., 2.5]])
assert_equal(z.mask, [[1, 0, 0], [1, 0, 0]])
#
z = x / y[None, :]
assert_equal(z, [[-1., 1., 1.], [-1., 4., 2.5]])
assert_equal(z.mask, [[1, 0, 0], [1, 0, 0]])
#
y = arange(2, dtype=float)
z = x / y[:, None]
assert_equal(z, [[-1., -1., -1.], [3., 4., 5.]])
assert_equal(z.mask, [[1, 1, 1], [0, 0, 0]])
def test_mixed_arithmetic(self):
"Tests mixed arithmetics."
na = np.array([1])
ma = array([1])
self.assertTrue(isinstance(na + ma, MaskedArray))
self.assertTrue(isinstance(ma + na, MaskedArray))
def test_limits_arithmetic(self):
tiny = np.finfo(float).tiny
a = array([tiny, 1. / tiny, 0.])
assert_equal(getmaskarray(a / 2), [0, 0, 0])
assert_equal(getmaskarray(2 / a), [1, 0, 1])
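        # Note: division is a "domained" operation -- entries where the
        # result would overflow (2/tiny) or is undefined (2/0) are masked
        # instead of raising.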
def test_masked_singleton_arithmetic(self):
"Tests some scalar arithmetics on MaskedArrays."
# Masked singleton should remain masked no matter what
xm = array(0, mask=1)
self.assertTrue((1 / array(0)).mask)
self.assertTrue((1 + xm).mask)
self.assertTrue((-xm).mask)
self.assertTrue(maximum(xm, xm).mask)
self.assertTrue(minimum(xm, xm).mask)
def test_masked_singleton_equality(self):
"Tests (in)equality on masked snigleton"
a = array([1, 2, 3], mask=[1, 1, 0])
assert((a[0] == 0) is masked)
assert((a[0] != 0) is masked)
assert_equal((a[-1] == 0), False)
assert_equal((a[-1] != 0), True)
def test_arithmetic_with_masked_singleton(self):
"Checks that there's no collapsing to masked"
x = masked_array([1, 2])
y = x * masked
assert_equal(y.shape, x.shape)
assert_equal(y._mask, [True, True])
y = x[0] * masked
assert y is masked
y = x + masked
assert_equal(y.shape, x.shape)
assert_equal(y._mask, [True, True])
def test_arithmetic_with_masked_singleton_on_1d_singleton(self):
"Check that we're not losing the shape of a singleton"
x = masked_array([1, ])
y = x + masked
assert_equal(y.shape, x.shape)
assert_equal(y.mask, [True, ])
def test_scalar_arithmetic(self):
x = array(0, mask=0)
assert_equal(x.filled().ctypes.data, x.ctypes.data)
# Make sure we don't lose the shape in some circumstances
xm = array((0, 0)) / 0.
assert_equal(xm.shape, (2,))
assert_equal(xm.mask, [1, 1])
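        # Note: the zero-division results are masked rather than set to
        # inf/nan, and the (2,) shape is preserved instead of collapsing
        # to the masked scalar.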
def test_basic_ufuncs (self):
"Test various functions such as sin, cos."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
assert_equal(np.cos(x), cos(xm))
assert_equal(np.cosh(x), cosh(xm))
assert_equal(np.sin(x), sin(xm))
assert_equal(np.sinh(x), sinh(xm))
assert_equal(np.tan(x), tan(xm))
assert_equal(np.tanh(x), tanh(xm))
assert_equal(np.sqrt(abs(x)), sqrt(xm))
assert_equal(np.log(abs(x)), log(xm))
assert_equal(np.log10(abs(x)), log10(xm))
assert_equal(np.exp(x), exp(xm))
assert_equal(np.arcsin(z), arcsin(zm))
assert_equal(np.arccos(z), arccos(zm))
assert_equal(np.arctan(z), arctan(zm))
assert_equal(np.arctan2(x, y), arctan2(xm, ym))
assert_equal(np.absolute(x), absolute(xm))
assert_equal(np.equal(x, y), equal(xm, ym))
assert_equal(np.not_equal(x, y), not_equal(xm, ym))
assert_equal(np.less(x, y), less(xm, ym))
assert_equal(np.greater(x, y), greater(xm, ym))
assert_equal(np.less_equal(x, y), less_equal(xm, ym))
assert_equal(np.greater_equal(x, y), greater_equal(xm, ym))
assert_equal(np.conjugate(x), conjugate(xm))
def test_count_func (self):
"Tests count"
ott = array([0., 1., 2., 3.], mask=[1, 0, 0, 0])
if sys.version_info[0] >= 3 or sys.platform == 'cli':
self.assertTrue(isinstance(count(ott), np.integer))
else:
self.assertTrue(isinstance(count(ott), int))
assert_equal(3, count(ott))
assert_equal(1, count(1))
        assert_equal(0, count(array(1, mask=[1])))
ott = ott.reshape((2, 2))
assert isinstance(count(ott, 0), ndarray)
if sys.version_info[0] >= 3 or sys.platform == 'cli':
assert isinstance(count(ott), np.integer)
else:
assert isinstance(count(ott), types.IntType)
assert_equal(3, count(ott))
assert getmask(count(ott, 0)) is nomask
assert_equal([1, 2], count(ott, 0))
def test_minmax_func (self):
"Tests minimum and maximum."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
xr = np.ravel(x) #max doesn't work if shaped
xmr = ravel(xm)
assert_equal(max(xr), maximum(xmr)) #true because of careful selection of data
assert_equal(min(xr), minimum(xmr)) #true because of careful selection of data
#
assert_equal(minimum([1, 2, 3], [4, 0, 9]), [1, 0, 3])
assert_equal(maximum([1, 2, 3], [4, 0, 9]), [4, 2, 9])
x = arange(5)
y = arange(5) - 2
x[3] = masked
y[0] = masked
assert_equal(minimum(x, y), where(less(x, y), x, y))
assert_equal(maximum(x, y), where(greater(x, y), x, y))
assert minimum(x) == 0
assert maximum(x) == 4
#
x = arange(4).reshape(2, 2)
x[-1, -1] = masked
assert_equal(maximum(x), 2)
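        # Note: with a single argument, minimum/maximum behave like
        # .min()/.max() and skip masked entries, hence maximum(x) == 2
        # once the largest value is masked.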
def test_minimummaximum_func(self):
a = np.ones((2, 2))
aminimum = minimum(a, a)
self.assertTrue(isinstance(aminimum, MaskedArray))
assert_equal(aminimum, np.minimum(a, a))
#
aminimum = minimum.outer(a, a)
self.assertTrue(isinstance(aminimum, MaskedArray))
assert_equal(aminimum, np.minimum.outer(a, a))
#
amaximum = maximum(a, a)
self.assertTrue(isinstance(amaximum, MaskedArray))
assert_equal(amaximum, np.maximum(a, a))
#
amaximum = maximum.outer(a, a)
self.assertTrue(isinstance(amaximum, MaskedArray))
assert_equal(amaximum, np.maximum.outer(a, a))
def test_minmax_reduce(self):
"Test np.min/maximum.reduce on array w/ full False mask"
a = array([1, 2, 3], mask=[False, False, False])
b = np.maximum.reduce(a)
assert_equal(b, 3)
def test_minmax_funcs_with_output(self):
"Tests the min/max functions with explicit outputs"
mask = arand(12).round()
xm = array(uniform(0, 10, 12), mask=mask)
xm.shape = (3, 4)
for funcname in ('min', 'max'):
# Initialize
npfunc = getattr(np, funcname)
mafunc = getattr(numpy.ma.core, funcname)
# Use the np version
nout = np.empty((4,), dtype=int)
try:
result = npfunc(xm, axis=0, out=nout)
except MaskError:
pass
nout = np.empty((4,), dtype=float)
result = npfunc(xm, axis=0, out=nout)
self.assertTrue(result is nout)
# Use the ma version
nout.fill(-999)
result = mafunc(xm, axis=0, out=nout)
self.assertTrue(result is nout)
def test_minmax_methods(self):
"Additional tests on max/min"
(_, _, _, _, _, xm, _, _, _, _) = self.d
xm.shape = (xm.size,)
assert_equal(xm.max(), 10)
self.assertTrue(xm[0].max() is masked)
self.assertTrue(xm[0].max(0) is masked)
self.assertTrue(xm[0].max(-1) is masked)
assert_equal(xm.min(), -10.)
self.assertTrue(xm[0].min() is masked)
self.assertTrue(xm[0].min(0) is masked)
self.assertTrue(xm[0].min(-1) is masked)
assert_equal(xm.ptp(), 20.)
self.assertTrue(xm[0].ptp() is masked)
self.assertTrue(xm[0].ptp(0) is masked)
self.assertTrue(xm[0].ptp(-1) is masked)
#
x = array([1, 2, 3], mask=True)
self.assertTrue(x.min() is masked)
self.assertTrue(x.max() is masked)
self.assertTrue(x.ptp() is masked)
def test_addsumprod (self):
"Tests add, sum, product."
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
assert_equal(np.add.reduce(x), add.reduce(x))
assert_equal(np.add.accumulate(x), add.accumulate(x))
assert_equal(4, sum(array(4), axis=0))
assert_equal(np.sum(x, axis=0), sum(x, axis=0))
assert_equal(np.sum(filled(xm, 0), axis=0), sum(xm, axis=0))
assert_equal(np.sum(x, 0), sum(x, 0))
assert_equal(np.product(x, axis=0), product(x, axis=0))
assert_equal(np.product(x, 0), product(x, 0))
assert_equal(np.product(filled(xm, 1), axis=0), product(xm, axis=0))
s = (3, 4)
x.shape = y.shape = xm.shape = ym.shape = s
if len(s) > 1:
assert_equal(np.concatenate((x, y), 1), concatenate((xm, ym), 1))
assert_equal(np.add.reduce(x, 1), add.reduce(x, 1))
assert_equal(np.sum(x, 1), sum(x, 1))
assert_equal(np.product(x, 1), product(x, 1))
def test_binops_d2D(self):
"Test binary operations on 2D data"
a = array([[1.], [2.], [3.]], mask=[[False], [True], [True]])
b = array([[2., 3.], [4., 5.], [6., 7.]])
#
test = a * b
control = array([[2., 3.], [2., 2.], [3., 3.]],
mask=[[0, 0], [1, 1], [1, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = b * a
control = array([[2., 3.], [4., 5.], [6., 7.]],
mask=[[0, 0], [1, 1], [1, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
a = array([[1.], [2.], [3.]])
b = array([[2., 3.], [4., 5.], [6., 7.]],
mask=[[0, 0], [0, 0], [0, 1]])
test = a * b
control = array([[2, 3], [8, 10], [18, 3]],
mask=[[0, 0], [0, 0], [0, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = b * a
control = array([[2, 3], [8, 10], [18, 7]],
mask=[[0, 0], [0, 0], [0, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_domained_binops_d2D(self):
"Test domained binary operations on 2D data"
a = array([[1.], [2.], [3.]], mask=[[False], [True], [True]])
b = array([[2., 3.], [4., 5.], [6., 7.]])
#
test = a / b
control = array([[1. / 2., 1. / 3.], [2., 2.], [3., 3.]],
mask=[[0, 0], [1, 1], [1, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = b / a
control = array([[2. / 1., 3. / 1.], [4., 5.], [6., 7.]],
mask=[[0, 0], [1, 1], [1, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
a = array([[1.], [2.], [3.]])
b = array([[2., 3.], [4., 5.], [6., 7.]],
mask=[[0, 0], [0, 0], [0, 1]])
test = a / b
control = array([[1. / 2, 1. / 3], [2. / 4, 2. / 5], [3. / 6, 3]],
mask=[[0, 0], [0, 0], [0, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
#
test = b / a
control = array([[2 / 1., 3 / 1.], [4 / 2., 5 / 2.], [6 / 3., 7]],
mask=[[0, 0], [0, 0], [0, 1]])
assert_equal(test, control)
assert_equal(test.data, control.data)
assert_equal(test.mask, control.mask)
def test_noshrinking(self):
"Check that we don't shrink a mask when not wanted"
# Binary operations
a = masked_array([1, 2, 3], mask=[False, False, False], shrink=False)
b = a + 1
assert_equal(b.mask, [0, 0, 0])
# In place binary operation
a += 1
assert_equal(a.mask, [0, 0, 0])
# Domained binary operation
b = a / 1.
assert_equal(b.mask, [0, 0, 0])
# In place binary operation
a /= 1.
assert_equal(a.mask, [0, 0, 0])
def test_mod(self):
"Tests mod"
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
assert_equal(mod(x, y), mod(xm, ym))
test = mod(ym, xm)
assert_equal(test, np.mod(ym, xm))
assert_equal(test.mask, mask_or(xm.mask, ym.mask))
test = mod(xm, ym)
assert_equal(test, np.mod(xm, ym))
assert_equal(test.mask, mask_or(mask_or(xm.mask, ym.mask), (ym == 0)))
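        # Note: the mask of mod(a, b) is the union of both input masks
        # plus every position where the divisor is zero.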
def test_TakeTransposeInnerOuter(self):
"Test of take, transpose, inner, outer products"
x = arange(24)
y = np.arange(24)
x[5:6] = masked
x = x.reshape(2, 3, 4)
y = y.reshape(2, 3, 4)
assert_equal(np.transpose(y, (2, 0, 1)), transpose(x, (2, 0, 1)))
assert_equal(np.take(y, (2, 0, 1), 1), take(x, (2, 0, 1), 1))
assert_equal(np.inner(filled(x, 0), filled(y, 0)),
inner(x, y))
assert_equal(np.outer(filled(x, 0), filled(y, 0)),
outer(x, y))
y = array(['abc', 1, 'def', 2, 3], object)
y[2] = masked
t = take(y, [0, 3, 4])
assert t[0] == 'abc'
assert t[1] == 2
assert t[2] == 3
def test_imag_real(self):
"Check complex"
xx = array([1 + 10j, 20 + 2j], mask=[1, 0])
assert_equal(xx.imag, [10, 2])
assert_equal(xx.imag.filled(), [1e+20, 2])
assert_equal(xx.imag.dtype, xx._data.imag.dtype)
assert_equal(xx.real, [1, 20])
assert_equal(xx.real.filled(), [1e+20, 20])
assert_equal(xx.real.dtype, xx._data.real.dtype)
def test_methods_with_output(self):
xm = array(uniform(0, 10, 12)).reshape(3, 4)
xm[:, 0] = xm[0] = xm[-1, -1] = masked
#
funclist = ('sum', 'prod', 'var', 'std', 'max', 'min', 'ptp', 'mean',)
#
for funcname in funclist:
npfunc = getattr(np, funcname)
xmmeth = getattr(xm, funcname)
# A ndarray as explicit input
output = np.empty(4, dtype=float)
output.fill(-9999)
result = npfunc(xm, axis=0, out=output)
# ... the result should be the given output
self.assertTrue(result is output)
assert_equal(result, xmmeth(axis=0, out=output))
#
output = empty(4, dtype=int)
result = xmmeth(axis=0, out=output)
self.assertTrue(result is output)
self.assertTrue(output[0] is masked)
def test_eq_on_structured(self):
"Test the equality of structured arrays"
ndtype = [('A', int), ('B', int)]
a = array([(1, 1), (2, 2)], mask=[(0, 1), (0, 0)], dtype=ndtype)
test = (a == a)
assert_equal(test, [True, True])
assert_equal(test.mask, [False, False])
b = array([(1, 1), (2, 2)], mask=[(1, 0), (0, 0)], dtype=ndtype)
test = (a == b)
assert_equal(test, [False, True])
assert_equal(test.mask, [True, False])
b = array([(1, 1), (2, 2)], mask=[(0, 1), (1, 0)], dtype=ndtype)
test = (a == b)
assert_equal(test, [True, False])
assert_equal(test.mask, [False, False])
def test_ne_on_structured(self):
"Test the inequality of structured arrays"
ndtype = [('A', int), ('B', int)]
a = array([(1, 1), (2, 2)], mask=[(0, 1), (0, 0)], dtype=ndtype)
test = (a != a)
assert_equal(test, [False, False])
assert_equal(test.mask, [False, False])
b = array([(1, 1), (2, 2)], mask=[(1, 0), (0, 0)], dtype=ndtype)
test = (a != b)
assert_equal(test, [True, False])
assert_equal(test.mask, [True, False])
b = array([(1, 1), (2, 2)], mask=[(0, 1), (1, 0)], dtype=ndtype)
test = (a != b)
assert_equal(test, [False, True])
assert_equal(test.mask, [False, False])
def test_eq_w_None(self):
# With partial mask
a = array([1, 2], mask=[0, 1])
assert_equal(a == None, False)
assert_equal(a.data == None, False)
assert_equal(a.mask == None, False)
assert_equal(a != None, True)
# With nomask
a = array([1, 2], mask=False)
assert_equal(a == None, False)
assert_equal(a != None, True)
# With complete mask
a = array([1, 2], mask=True)
assert_equal(a == None, False)
assert_equal(a != None, True)
# With masked
a = masked
assert_equal(a == None, masked)
def test_eq_w_scalar(self):
a = array(1)
assert_equal(a == 1, True)
assert_equal(a == 0, False)
assert_equal(a != 1, False)
assert_equal(a != 0, True)
def test_numpyarithmetics(self):
"Check that the mask is not back-propagated when using numpy functions"
a = masked_array([-1, 0, 1, 2, 3], mask=[0, 0, 0, 0, 1])
control = masked_array([np.nan, np.nan, 0, np.log(2), -1],
mask=[1, 1, 0, 0, 1])
#
test = log(a)
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(a.mask, [0, 0, 0, 0, 1])
#
test = np.log(a)
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(a.mask, [0, 0, 0, 0, 1])
#------------------------------------------------------------------------------
class TestMaskedArrayAttributes(TestCase):
def test_keepmask(self):
"Tests the keep mask flag"
x = masked_array([1, 2, 3], mask=[1, 0, 0])
mx = masked_array(x)
assert_equal(mx.mask, x.mask)
mx = masked_array(x, mask=[0, 1, 0], keep_mask=False)
assert_equal(mx.mask, [0, 1, 0])
mx = masked_array(x, mask=[0, 1, 0], keep_mask=True)
assert_equal(mx.mask, [1, 1, 0])
# We default to true
mx = masked_array(x, mask=[0, 1, 0])
assert_equal(mx.mask, [1, 1, 0])
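        # i.e. with keep_mask=True (the default) the new mask is OR-ed
        # with the mask x already carries:
        assert_equal(masked_array(x, mask=[0, 0, 1]).mask, [1, 0, 1])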
def test_hardmask(self):
"Test hard_mask"
d = arange(5)
n = [0, 0, 0, 1, 1]
m = make_mask(n)
xh = array(d, mask=m, hard_mask=True)
# We need to copy, to avoid updating d in xh !
xs = array(d, mask=m, hard_mask=False, copy=True)
xh[[1, 4]] = [10, 40]
xs[[1, 4]] = [10, 40]
assert_equal(xh._data, [0, 10, 2, 3, 4])
assert_equal(xs._data, [0, 10, 2, 3, 40])
#assert_equal(xh.mask.ctypes._data, m.ctypes._data)
assert_equal(xs.mask, [0, 0, 0, 1, 0])
self.assertTrue(xh._hardmask)
self.assertTrue(not xs._hardmask)
xh[1:4] = [10, 20, 30]
xs[1:4] = [10, 20, 30]
assert_equal(xh._data, [0, 10, 20, 3, 4])
assert_equal(xs._data, [0, 10, 20, 30, 40])
#assert_equal(xh.mask.ctypes._data, m.ctypes._data)
assert_equal(xs.mask, nomask)
xh[0] = masked
xs[0] = masked
assert_equal(xh.mask, [1, 0, 0, 1, 1])
assert_equal(xs.mask, [1, 0, 0, 0, 0])
xh[:] = 1
xs[:] = 1
assert_equal(xh._data, [0, 1, 1, 3, 4])
assert_equal(xs._data, [1, 1, 1, 1, 1])
assert_equal(xh.mask, [1, 0, 0, 1, 1])
assert_equal(xs.mask, nomask)
# Switch to soft mask
xh.soften_mask()
xh[:] = arange(5)
assert_equal(xh._data, [0, 1, 2, 3, 4])
assert_equal(xh.mask, nomask)
# Switch back to hard mask
xh.harden_mask()
xh[xh < 3] = masked
assert_equal(xh._data, [0, 1, 2, 3, 4])
assert_equal(xh._mask, [1, 1, 1, 0, 0])
xh[filled(xh > 1, False)] = 5
assert_equal(xh._data, [0, 1, 2, 5, 5])
assert_equal(xh._mask, [1, 1, 1, 0, 0])
#
xh = array([[1, 2], [3, 4]], mask=[[1, 0], [0, 0]], hard_mask=True)
xh[0] = 0
assert_equal(xh._data, [[1, 0], [3, 4]])
assert_equal(xh._mask, [[1, 0], [0, 0]])
xh[-1, -1] = 5
assert_equal(xh._data, [[1, 0], [3, 5]])
assert_equal(xh._mask, [[1, 0], [0, 0]])
xh[filled(xh < 5, False)] = 2
assert_equal(xh._data, [[1, 2], [2, 5]])
assert_equal(xh._mask, [[1, 0], [0, 0]])
def test_hardmask_again(self):
"Another test of hardmask"
d = arange(5)
n = [0, 0, 0, 1, 1]
m = make_mask(n)
xh = array(d, mask=m, hard_mask=True)
xh[4:5] = 999
#assert_equal(xh.mask.ctypes._data, m.ctypes._data)
xh[0:1] = 999
assert_equal(xh._data, [999, 1, 2, 3, 4])
def test_hardmask_oncemore_yay(self):
"OK, yet another test of hardmask"
"Make sure that harden_mask/soften_mask//unshare_mask retursn self"
a = array([1, 2, 3], mask=[1, 0, 0])
b = a.harden_mask()
assert_equal(a, b)
b[0] = 0
assert_equal(a, b)
assert_equal(b, array([1, 2, 3], mask=[1, 0, 0]))
a = b.soften_mask()
a[0] = 0
assert_equal(a, b)
assert_equal(b, array([0, 2, 3], mask=[0, 0, 0]))
def test_smallmask(self):
"Checks the behaviour of _smallmask"
a = arange(10)
a[1] = masked
a[1] = 1
assert_equal(a._mask, nomask)
a = arange(10)
a._smallmask = False
a[1] = masked
a[1] = 1
assert_equal(a._mask, zeros(10))
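        # Note: _smallmask controls whether an all-False mask is collapsed
        # back to nomask (the default) or kept as a full boolean array.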
def test_shrink_mask(self):
"Tests .shrink_mask()"
a = array([1, 2, 3], mask=[0, 0, 0])
b = a.shrink_mask()
assert_equal(a, b)
assert_equal(a.mask, nomask)
def test_flat(self):
"Test flat on masked_matrices"
test = masked_array(np.matrix([[1, 2, 3]]), mask=[0, 0, 1])
test.flat = masked_array([3, 2, 1], mask=[1, 0, 0])
control = masked_array(np.matrix([[3, 2, 1]]), mask=[1, 0, 0])
assert_equal(test, control)
#
test = masked_array(np.matrix([[1, 2, 3]]), mask=[0, 0, 1])
testflat = test.flat
testflat[:] = testflat[[2, 1, 0]]
assert_equal(test, control)
#------------------------------------------------------------------------------
class TestFillingValues(TestCase):
#
def test_check_on_scalar(self):
"Test _check_fill_value"
_check_fill_value = np.ma.core._check_fill_value
#
fval = _check_fill_value(0, int)
assert_equal(fval, 0)
fval = _check_fill_value(None, int)
assert_equal(fval, default_fill_value(0))
#
fval = _check_fill_value(0, "|S3")
assert_equal(fval, asbytes("0"))
fval = _check_fill_value(None, "|S3")
assert_equal(fval, default_fill_value("|S3"))
#
fval = _check_fill_value(1e+20, int)
assert_equal(fval, default_fill_value(0))
def test_check_on_fields(self):
"Tests _check_fill_value with records"
_check_fill_value = np.ma.core._check_fill_value
ndtype = [('a', int), ('b', float), ('c', "|S3")]
# A check on a list should return a single record
fval = _check_fill_value([-999, -12345678.9, "???"], ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, asbytes("???")])
# A check on None should output the defaults
fval = _check_fill_value(None, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [default_fill_value(0),
default_fill_value(0.),
asbytes(default_fill_value("0"))])
#.....Using a structured type as fill_value should work
fill_val = np.array((-999, -12345678.9, "???"), dtype=ndtype)
fval = _check_fill_value(fill_val, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, asbytes("???")])
#.....Using a flexible type w/ a different type shouldn't matter
fill_val = np.array((-999, -12345678.9, "???"),
dtype=[("A", int), ("B", float), ("C", "|S3")])
fval = _check_fill_value(fill_val, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, asbytes("???")])
#.....Using an object-array shouldn't matter either
fill_value = np.array((-999, -12345678.9, "???"), dtype=object)
fval = _check_fill_value(fill_val, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, asbytes("???")])
#
fill_value = np.array((-999, -12345678.9, "???"))
fval = _check_fill_value(fill_val, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, asbytes("???")])
#.....One-field-only flexible type should work as well
ndtype = [("a", int)]
fval = _check_fill_value(-999999999, ndtype)
self.assertTrue(isinstance(fval, ndarray))
assert_equal(fval.item(), (-999999999,))
def test_fillvalue_conversion(self):
"Tests the behavior of fill_value during conversion"
        # Attach a tailored comment to make sure special attributes are
        # properly dealt with during conversion
a = array(asbytes_nested(['3', '4', '5']))
a._optinfo.update({'comment':"updated!"})
#
b = array(a, dtype=int)
assert_equal(b._data, [3, 4, 5])
assert_equal(b.fill_value, default_fill_value(0))
#
b = array(a, dtype=float)
assert_equal(b._data, [3, 4, 5])
assert_equal(b.fill_value, default_fill_value(0.))
#
b = a.astype(int)
assert_equal(b._data, [3, 4, 5])
assert_equal(b.fill_value, default_fill_value(0))
assert_equal(b._optinfo['comment'], "updated!")
#
b = a.astype([('a', '|S3')])
assert_equal(b['a']._data, a._data)
assert_equal(b['a'].fill_value, a.fill_value)
@dec.knownfailureif(True, "Multiple index values (arr[(1,3)]) are not supported")
def test_fillvalue(self):
"Yet more fun with the fill_value"
data = masked_array([1, 2, 3], fill_value= -999)
series = data[[0, 2, 1]]
assert_equal(series._fill_value, data._fill_value)
#
mtype = [('f', float), ('s', '|S3')]
x = array([(1, 'a'), (2, 'b'), (pi, 'pi')], dtype=mtype)
x.fill_value = 999
assert_equal(x.fill_value.item(), [999., asbytes('999')])
assert_equal(x['f'].fill_value, 999)
assert_equal(x['s'].fill_value, asbytes('999'))
#
x.fill_value = (9, '???')
assert_equal(x.fill_value.item(), (9, asbytes('???')))
assert_equal(x['f'].fill_value, 9)
assert_equal(x['s'].fill_value, asbytes('???'))
#
x = array([1, 2, 3.1])
x.fill_value = 999
assert_equal(np.asarray(x.fill_value).dtype, float)
assert_equal(x.fill_value, 999.)
assert_equal(x._fill_value, np.array(999.))
def test_fillvalue_exotic_dtype(self):
"Tests yet more exotic flexible dtypes"
_check_fill_value = np.ma.core._check_fill_value
ndtype = [('i', int), ('s', '|S8'), ('f', float)]
control = np.array((default_fill_value(0),
default_fill_value('0'),
default_fill_value(0.),),
dtype=ndtype)
assert_equal(_check_fill_value(None, ndtype), control)
# The shape shouldn't matter
ndtype = [('f0', float, (2, 2))]
control = np.array((default_fill_value(0.),),
dtype=[('f0', float)]).astype(ndtype)
assert_equal(_check_fill_value(None, ndtype), control)
control = np.array((0,), dtype=[('f0', float)]).astype(ndtype)
assert_equal(_check_fill_value(0, ndtype), control)
#
ndtype = np.dtype("int, (2,3)float, float")
control = np.array((default_fill_value(0),
default_fill_value(0.),
default_fill_value(0.),),
dtype="int, float, float").astype(ndtype)
test = _check_fill_value(None, ndtype)
assert_equal(test, control)
control = np.array((0, 0, 0), dtype="int, float, float").astype(ndtype)
assert_equal(_check_fill_value(0, ndtype), control)
def test_extremum_fill_value(self):
"Tests extremum fill values for flexible type."
a = array([(1, (2, 3)), (4, (5, 6))],
dtype=[('A', int), ('B', [('BA', int), ('BB', int)])])
test = a.fill_value
assert_equal(test['A'], default_fill_value(a['A']))
assert_equal(test['B']['BA'], default_fill_value(a['B']['BA']))
assert_equal(test['B']['BB'], default_fill_value(a['B']['BB']))
#
test = minimum_fill_value(a)
assert_equal(test[0], minimum_fill_value(a['A']))
assert_equal(test[1][0], minimum_fill_value(a['B']['BA']))
assert_equal(test[1][1], minimum_fill_value(a['B']['BB']))
assert_equal(test[1], minimum_fill_value(a['B']))
#
test = maximum_fill_value(a)
assert_equal(test[0], maximum_fill_value(a['A']))
assert_equal(test[1][0], maximum_fill_value(a['B']['BA']))
assert_equal(test[1][1], maximum_fill_value(a['B']['BB']))
assert_equal(test[1], maximum_fill_value(a['B']))
def test_fillvalue_individual_fields(self):
"Test setting fill_value on individual fields"
ndtype = [('a', int), ('b', int)]
# Explicit fill_value
        a = array(list(zip([1, 2, 3], [4, 5, 6])),
                  fill_value=(-999, -999), dtype=ndtype)
f = a._fill_value
aa = a['a']
aa.set_fill_value(10)
assert_equal(aa._fill_value, np.array(10))
assert_equal(tuple(a.fill_value), (10, -999))
a.fill_value['b'] = -10
assert_equal(tuple(a.fill_value), (10, -10))
# Implicit fill_value
        t = array(list(zip([1, 2, 3], [4, 5, 6])), dtype=[('a', int), ('b', int)])
tt = t['a']
tt.set_fill_value(10)
assert_equal(tt._fill_value, np.array(10))
assert_equal(tuple(t.fill_value), (10, default_fill_value(0)))
def test_fillvalue_implicit_structured_array(self):
"Check that fill_value is always defined for structured arrays"
ndtype = ('b', float)
adtype = ('a', float)
a = array([(1.,), (2.,)], mask=[(False,), (False,)],
fill_value=(np.nan,), dtype=np.dtype([adtype]))
b = empty(a.shape, dtype=[adtype, ndtype])
b['a'] = a['a']
b['a'].set_fill_value(a['a'].fill_value)
f = b._fill_value[()]
assert(np.isnan(f[0]))
assert_equal(f[-1], default_fill_value(1.))
def test_fillvalue_as_arguments(self):
"Test adding a fill_value parameter to empty/ones/zeros"
a = empty(3, fill_value=999.)
assert_equal(a.fill_value, 999.)
#
a = ones(3, fill_value=999., dtype=float)
assert_equal(a.fill_value, 999.)
#
a = zeros(3, fill_value=0., dtype=complex)
assert_equal(a.fill_value, 0.)
#
a = identity(3, fill_value=0., dtype=complex)
assert_equal(a.fill_value, 0.)
#------------------------------------------------------------------------------
class TestUfuncs(TestCase):
"Test class for the application of ufuncs on MaskedArrays."
def setUp(self):
"Base data definition."
self.d = (array([1.0, 0, -1, pi / 2] * 2, mask=[0, 1] + [0] * 6),
array([1.0, 0, -1, pi / 2] * 2, mask=[1, 0] + [0] * 6),)
self.err_status = np.geterr()
np.seterr(divide='ignore', invalid='ignore')
def tearDown(self):
np.seterr(**self.err_status)
def test_testUfuncRegression(self):
"Tests new ufuncs on MaskedArrays."
for f in ['sqrt', 'log', 'log10', 'exp', 'conjugate',
'sin', 'cos', 'tan',
'arcsin', 'arccos', 'arctan',
'sinh', 'cosh', 'tanh',
'arcsinh',
'arccosh',
'arctanh',
'absolute', 'fabs', 'negative',
# 'nonzero', 'around',
'floor', 'ceil',
# 'sometrue', 'alltrue',
'logical_not',
'add', 'subtract', 'multiply',
'divide', 'true_divide', 'floor_divide',
'remainder', 'fmod', 'hypot', 'arctan2',
'equal', 'not_equal', 'less_equal', 'greater_equal',
'less', 'greater',
'logical_and', 'logical_or', 'logical_xor',
]:
try:
uf = getattr(umath, f)
except AttributeError:
uf = getattr(fromnumeric, f)
mf = getattr(numpy.ma.core, f)
args = self.d[:uf.nin]
ur = uf(*args)
mr = mf(*args)
assert_equal(ur.filled(0), mr.filled(0), f)
assert_mask_equal(ur.mask, mr.mask, err_msg=f)
def test_reduce(self):
"Tests reduce on MaskedArrays."
a = self.d[0]
self.assertTrue(not alltrue(a, axis=0))
self.assertTrue(sometrue(a, axis=0))
assert_equal(sum(a[:3], axis=0), 0)
assert_equal(product(a, axis=0), 0)
assert_equal(add.reduce(a), pi)
def test_minmax(self):
"Tests extrema on MaskedArrays."
a = arange(1, 13).reshape(3, 4)
amask = masked_where(a < 5, a)
assert_equal(amask.max(), a.max())
assert_equal(amask.min(), 5)
assert_equal(amask.max(0), a.max(0))
assert_equal(amask.min(0), [5, 6, 7, 8])
self.assertTrue(amask.max(1)[0].mask)
self.assertTrue(amask.min(1)[0].mask)
def test_ndarray_mask(self):
"Check that the mask of the result is a ndarray (not a MaskedArray...)"
a = masked_array([-1, 0, 1, 2, 3], mask=[0, 0, 0, 0, 1])
test = np.sqrt(a)
control = masked_array([-1, 0, 1, np.sqrt(2), -1],
mask=[1, 0, 0, 0, 1])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
self.assertTrue(not isinstance(test.mask, MaskedArray))
#------------------------------------------------------------------------------
class TestMaskedArrayInPlaceArithmetics(TestCase):
"Test MaskedArray Arithmetics"
def setUp(self):
x = arange(10)
y = arange(10)
xm = arange(10)
xm[2] = masked
self.intdata = (x, y, xm)
self.floatdata = (x.astype(float), y.astype(float), xm.astype(float))
def test_inplace_addition_scalar(self):
"""Test of inplace additions"""
(x, y, xm) = self.intdata
xm[2] = masked
x += 1
assert_equal(x, y + 1)
xm += 1
assert_equal(xm, y + 1)
#
warnings.simplefilter('ignore', DeprecationWarning)
(x, _, xm) = self.floatdata
id1 = x.raw_data().ctypes._data
x += 1.
assert (id1 == x.raw_data().ctypes._data)
assert_equal(x, y + 1.)
warnings.resetwarnings()
def test_inplace_addition_array(self):
"""Test of inplace additions"""
(x, y, xm) = self.intdata
m = xm.mask
a = arange(10, dtype=float)
a[-1] = masked
x += a
xm += a
assert_equal(x, y + a)
assert_equal(xm, y + a)
assert_equal(xm.mask, mask_or(m, a.mask))
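        # Note: in-place operations OR the masks together, so positions
        # masked in either operand stay masked in the result.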
def test_inplace_subtraction_scalar(self):
"""Test of inplace subtractions"""
(x, y, xm) = self.intdata
x -= 1
assert_equal(x, y - 1)
xm -= 1
assert_equal(xm, y - 1)
def test_inplace_subtraction_array(self):
"""Test of inplace subtractions"""
(x, y, xm) = self.floatdata
m = xm.mask
a = arange(10, dtype=float)
a[-1] = masked
x -= a
xm -= a
assert_equal(x, y - a)
assert_equal(xm, y - a)
assert_equal(xm.mask, mask_or(m, a.mask))
def test_inplace_multiplication_scalar(self):
"""Test of inplace multiplication"""
(x, y, xm) = self.floatdata
x *= 2.0
assert_equal(x, y * 2)
xm *= 2.0
assert_equal(xm, y * 2)
def test_inplace_multiplication_array(self):
"""Test of inplace multiplication"""
(x, y, xm) = self.floatdata
m = xm.mask
a = arange(10, dtype=float)
a[-1] = masked
x *= a
xm *= a
assert_equal(x, y * a)
assert_equal(xm, y * a)
assert_equal(xm.mask, mask_or(m, a.mask))
def test_inplace_division_scalar_int(self):
"""Test of inplace division"""
(x, y, xm) = self.intdata
x = arange(10) * 2
xm = arange(10) * 2
xm[2] = masked
x /= 2
assert_equal(x, y)
xm /= 2
assert_equal(xm, y)
def test_inplace_division_scalar_float(self):
"""Test of inplace division"""
(x, y, xm) = self.floatdata
x /= 2.0
assert_equal(x, y / 2.0)
xm /= arange(10)
assert_equal(xm, ones((10,)))
def test_inplace_division_array_float(self):
"""Test of inplace division"""
(x, y, xm) = self.floatdata
m = xm.mask
a = arange(10, dtype=float)
a[-1] = masked
x /= a
xm /= a
assert_equal(x, y / a)
assert_equal(xm, y / a)
assert_equal(xm.mask, mask_or(mask_or(m, a.mask), (a == 0)))
def test_inplace_division_misc(self):
#
x = [1., 1., 1., -2., pi / 2., 4., 5., -10., 10., 1., 2., 3.]
y = [5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.]
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0 , 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
#
z = xm / ym
assert_equal(z._mask, [1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1])
assert_equal(z._data, [1., 1., 1., -1., -pi / 2., 4., 5., 1., 1., 1., 2., 3.])
#assert_equal(z._data, [0.2,1.,1./3.,-1.,-pi/2.,-1.,5.,1.,1.,1.,2.,1.])
#
xm = xm.copy()
xm /= ym
assert_equal(xm._mask, [1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1])
assert_equal(z._data, [1., 1., 1., -1., -pi / 2., 4., 5., 1., 1., 1., 2., 3.])
#assert_equal(xm._data, [1/5.,1.,1./3.,-1.,-pi/2.,-1.,5.,1.,1.,1.,2.,1.])
def test_datafriendly_add(self):
"Test keeping data w/ (inplace) addition"
x = array([1, 2, 3], mask=[0, 0, 1])
# Test add w/ scalar
xx = x + 1
assert_equal(xx.data, [2, 3, 3])
assert_equal(xx.mask, [0, 0, 1])
# Test iadd w/ scalar
x += 1
assert_equal(x.data, [2, 3, 3])
assert_equal(x.mask, [0, 0, 1])
# Test add w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x + array([1, 2, 3], mask=[1, 0, 0])
assert_equal(xx.data, [1, 4, 3])
assert_equal(xx.mask, [1, 0, 1])
# Test iadd w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
x += array([1, 2, 3], mask=[1, 0, 0])
assert_equal(x.data, [1, 4, 3])
assert_equal(x.mask, [1, 0, 1])
def test_datafriendly_sub(self):
"Test keeping data w/ (inplace) subtraction"
# Test sub w/ scalar
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x - 1
assert_equal(xx.data, [0, 1, 3])
assert_equal(xx.mask, [0, 0, 1])
# Test isub w/ scalar
x = array([1, 2, 3], mask=[0, 0, 1])
x -= 1
assert_equal(x.data, [0, 1, 3])
assert_equal(x.mask, [0, 0, 1])
# Test sub w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x - array([1, 2, 3], mask=[1, 0, 0])
assert_equal(xx.data, [1, 0, 3])
assert_equal(xx.mask, [1, 0, 1])
# Test isub w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
x -= array([1, 2, 3], mask=[1, 0, 0])
assert_equal(x.data, [1, 0, 3])
assert_equal(x.mask, [1, 0, 1])
def test_datafriendly_mul(self):
"Test keeping data w/ (inplace) multiplication"
# Test mul w/ scalar
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x * 2
assert_equal(xx.data, [2, 4, 3])
assert_equal(xx.mask, [0, 0, 1])
# Test imul w/ scalar
x = array([1, 2, 3], mask=[0, 0, 1])
x *= 2
assert_equal(x.data, [2, 4, 3])
assert_equal(x.mask, [0, 0, 1])
# Test mul w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x * array([10, 20, 30], mask=[1, 0, 0])
assert_equal(xx.data, [1, 40, 3])
assert_equal(xx.mask, [1, 0, 1])
# Test imul w/ array
x = array([1, 2, 3], mask=[0, 0, 1])
x *= array([10, 20, 30], mask=[1, 0, 0])
assert_equal(x.data, [1, 40, 3])
assert_equal(x.mask, [1, 0, 1])
def test_datafriendly_div(self):
"Test keeping data w/ (inplace) division"
# Test div on scalar
x = array([1, 2, 3], mask=[0, 0, 1])
xx = x / 2.
assert_equal(xx.data, [1 / 2., 2 / 2., 3])
assert_equal(xx.mask, [0, 0, 1])
# Test idiv on scalar
x = array([1., 2., 3.], mask=[0, 0, 1])
x /= 2.
assert_equal(x.data, [1 / 2., 2 / 2., 3])
assert_equal(x.mask, [0, 0, 1])
# Test div on array
x = array([1., 2., 3.], mask=[0, 0, 1])
xx = x / array([10., 20., 30.], mask=[1, 0, 0])
assert_equal(xx.data, [1., 2. / 20., 3.])
assert_equal(xx.mask, [1, 0, 1])
# Test idiv on array
x = array([1., 2., 3.], mask=[0, 0, 1])
x /= array([10., 20., 30.], mask=[1, 0, 0])
assert_equal(x.data, [1., 2 / 20., 3.])
assert_equal(x.mask, [1, 0, 1])
def test_datafriendly_pow(self):
"Test keeping data w/ (inplace) power"
# Test pow on scalar
x = array([1., 2., 3.], mask=[0, 0, 1])
xx = x ** 2.5
assert_equal(xx.data, [1., 2. ** 2.5, 3.])
assert_equal(xx.mask, [0, 0, 1])
# Test ipow on scalar
x **= 2.5
assert_equal(x.data, [1., 2. ** 2.5, 3])
assert_equal(x.mask, [0, 0, 1])
def test_datafriendly_add_arrays(self):
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 0])
a += b
assert_equal(a, [[2, 2], [4, 4]])
if a.mask is not nomask:
assert_equal(a.mask, [[0, 0], [0, 0]])
#
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 1])
a += b
assert_equal(a, [[2, 2], [4, 4]])
assert_equal(a.mask, [[0, 1], [0, 1]])
def test_datafriendly_sub_arrays(self):
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 0])
a -= b
assert_equal(a, [[0, 0], [2, 2]])
if a.mask is not nomask:
assert_equal(a.mask, [[0, 0], [0, 0]])
#
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 1])
a -= b
assert_equal(a, [[0, 0], [2, 2]])
assert_equal(a.mask, [[0, 1], [0, 1]])
def test_datafriendly_mul_arrays(self):
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 0])
a *= b
assert_equal(a, [[1, 1], [3, 3]])
if a.mask is not nomask:
assert_equal(a.mask, [[0, 0], [0, 0]])
#
a = array([[1, 1], [3, 3]])
b = array([1, 1], mask=[0, 1])
a *= b
assert_equal(a, [[1, 1], [3, 3]])
assert_equal(a.mask, [[0, 1], [0, 1]])
#------------------------------------------------------------------------------
class TestMaskedArrayMethods(TestCase):
"Test class for miscellaneous MaskedArrays methods."
def setUp(self):
"Base data definition."
x = np.array([ 8.375, 7.545, 8.828, 8.5 , 1.757, 5.928,
8.43 , 7.78 , 9.865, 5.878, 8.979, 4.732,
3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
6.04 , 9.63 , 7.712, 3.382, 4.489, 6.479,
7.189, 9.645, 5.395, 4.961, 9.894, 2.893,
7.357, 9.828, 6.272, 3.758, 6.693, 0.993])
X = x.reshape(6, 6)
XX = x.reshape(3, 2, 2, 3)
m = np.array([0, 1, 0, 1, 0, 0,
1, 0, 1, 1, 0, 1,
0, 0, 0, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 1, 0])
mx = array(data=x, mask=m)
mX = array(data=X, mask=m.reshape(X.shape))
mXX = array(data=XX, mask=m.reshape(XX.shape))
m2 = np.array([1, 1, 0, 1, 0, 0,
1, 1, 1, 1, 0, 1,
0, 0, 1, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 1, 0,
0, 0, 1, 0, 1, 1])
m2x = array(data=x, mask=m2)
m2X = array(data=X, mask=m2.reshape(X.shape))
m2XX = array(data=XX, mask=m2.reshape(XX.shape))
self.d = (x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX)
def test_generic_methods(self):
"Tests some MaskedArray methods."
a = array([1, 3, 2])
b = array([1, 3, 2], mask=[1, 0, 1])
assert_equal(a.any(), a._data.any())
assert_equal(a.all(), a._data.all())
assert_equal(a.argmax(), a._data.argmax())
assert_equal(a.argmin(), a._data.argmin())
assert_equal(a.choose(0, 1, 2, 3, 4), a._data.choose(0, 1, 2, 3, 4))
assert_equal(a.compress([1, 0, 1]), a._data.compress([1, 0, 1]))
assert_equal(a.conj(), a._data.conj())
assert_equal(a.conjugate(), a._data.conjugate())
#
m = array([[1, 2], [3, 4]])
assert_equal(m.diagonal(), m._data.diagonal())
assert_equal(a.sum(), a._data.sum())
assert_equal(a.take([1, 2]), a._data.take([1, 2]))
assert_equal(m.transpose(), m._data.transpose())
def test_allclose(self):
"Tests allclose on arrays"
a = arand(10)
b = a + arand(10) * 1e-8
self.assertTrue(allclose(a, b))
# Test allclose w/ infs
a[0] = np.inf
self.assertTrue(not allclose(a, b))
b[0] = np.inf
self.assertTrue(allclose(a, b))
# Test all close w/ masked
a = masked_array(a)
a[-1] = masked
self.assertTrue(allclose(a, b, masked_equal=True))
self.assertTrue(not allclose(a, b, masked_equal=False))
# Test comparison w/ scalar
a *= 1e-8
a[0] = 0
self.assertTrue(allclose(a, 0, masked_equal=True))
def test_allany(self):
"""Checks the any/all methods/functions."""
x = np.array([[ 0.13, 0.26, 0.90],
[ 0.28, 0.33, 0.63],
[ 0.31, 0.87, 0.70]])
m = np.array([[ True, False, False],
[False, False, False],
[True, True, False]], dtype=np.bool_)
mx = masked_array(x, mask=m)
xbig = np.array([[False, False, True],
[False, False, True],
[False, True, True]], dtype=np.bool_)
mxbig = (mx > 0.5)
mxsmall = (mx < 0.5)
#
assert (mxbig.all() == False)
assert (mxbig.any() == True)
assert_equal(mxbig.all(0), [False, False, True])
assert_equal(mxbig.all(1), [False, False, True])
assert_equal(mxbig.any(0), [False, False, True])
assert_equal(mxbig.any(1), [True, True, True])
#
assert (mxsmall.all() == False)
assert (mxsmall.any() == True)
assert_equal(mxsmall.all(0), [True, True, False])
assert_equal(mxsmall.all(1), [False, False, False])
assert_equal(mxsmall.any(0), [True, True, False])
assert_equal(mxsmall.any(1), [True, True, False])
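        # Note: masked entries are skipped by the reductions (treated as
        # True by all() and as False by any()); a fully masked reduction
        # yields `masked`, as test_allany_oddities below shows.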
def test_allany_onmatrices(self):
x = np.array([[ 0.13, 0.26, 0.90],
[ 0.28, 0.33, 0.63],
[ 0.31, 0.87, 0.70]])
X = np.matrix(x)
m = np.array([[ True, False, False],
[False, False, False],
[True, True, False]], dtype=np.bool_)
mX = masked_array(X, mask=m)
mXbig = (mX > 0.5)
mXsmall = (mX < 0.5)
#
assert (mXbig.all() == False)
assert (mXbig.any() == True)
assert_equal(mXbig.all(0), np.matrix([False, False, True]))
assert_equal(mXbig.all(1), np.matrix([False, False, True]).T)
assert_equal(mXbig.any(0), np.matrix([False, False, True]))
assert_equal(mXbig.any(1), np.matrix([ True, True, True]).T)
#
assert (mXsmall.all() == False)
assert (mXsmall.any() == True)
assert_equal(mXsmall.all(0), np.matrix([True, True, False]))
assert_equal(mXsmall.all(1), np.matrix([False, False, False]).T)
assert_equal(mXsmall.any(0), np.matrix([True, True, False]))
assert_equal(mXsmall.any(1), np.matrix([True, True, False]).T)
def test_allany_oddities(self):
"Some fun with all and any"
store = empty(1, dtype=bool)
full = array([1, 2, 3], mask=True)
#
self.assertTrue(full.all() is masked)
full.all(out=store)
self.assertTrue(store)
        assert_equal(store._mask, True)
self.assertTrue(store is not masked)
#
store = empty(1, dtype=bool)
self.assertTrue(full.any() is masked)
full.any(out=store)
self.assertTrue(not store)
        assert_equal(store._mask, True)
self.assertTrue(store is not masked)
def test_argmax_argmin(self):
"Tests argmin & argmax on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
#
assert_equal(mx.argmin(), 35)
assert_equal(mX.argmin(), 35)
assert_equal(m2x.argmin(), 4)
assert_equal(m2X.argmin(), 4)
assert_equal(mx.argmax(), 28)
assert_equal(mX.argmax(), 28)
assert_equal(m2x.argmax(), 31)
assert_equal(m2X.argmax(), 31)
#
assert_equal(mX.argmin(0), [2, 2, 2, 5, 0, 5])
assert_equal(m2X.argmin(0), [2, 2, 4, 5, 0, 4])
assert_equal(mX.argmax(0), [0, 5, 0, 5, 4, 0])
assert_equal(m2X.argmax(0), [5, 5, 0, 5, 1, 0])
#
assert_equal(mX.argmin(1), [4, 1, 0, 0, 5, 5, ])
assert_equal(m2X.argmin(1), [4, 4, 0, 0, 5, 3])
assert_equal(mX.argmax(1), [2, 4, 1, 1, 4, 1])
assert_equal(m2X.argmax(1), [2, 4, 1, 1, 1, 1])
def test_clip(self):
"Tests clip on MaskedArrays."
x = np.array([ 8.375, 7.545, 8.828, 8.5 , 1.757, 5.928,
8.43 , 7.78 , 9.865, 5.878, 8.979, 4.732,
3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
6.04 , 9.63 , 7.712, 3.382, 4.489, 6.479,
7.189, 9.645, 5.395, 4.961, 9.894, 2.893,
7.357, 9.828, 6.272, 3.758, 6.693, 0.993])
m = np.array([0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1,
0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0])
mx = array(x, mask=m)
clipped = mx.clip(2, 8)
assert_equal(clipped.mask, mx.mask)
assert_equal(clipped._data, x.clip(2, 8))
assert_equal(clipped._data, mx._data.clip(2, 8))
def test_compress(self):
"test compress"
a = masked_array([1., 2., 3., 4., 5.], fill_value=9999)
condition = (a > 1.5) & (a < 3.5)
assert_equal(a.compress(condition), [2., 3.])
#
a[[2, 3]] = masked
b = a.compress(condition)
assert_equal(b._data, [2., 3.])
assert_equal(b._mask, [0, 1])
assert_equal(b.fill_value, 9999)
assert_equal(b, a[condition])
#
condition = (a < 4.)
b = a.compress(condition)
assert_equal(b._data, [1., 2., 3.])
assert_equal(b._mask, [0, 0, 1])
assert_equal(b.fill_value, 9999)
assert_equal(b, a[condition])
#
a = masked_array([[10, 20, 30], [40, 50, 60]], mask=[[0, 0, 1], [1, 0, 0]])
b = a.compress(a.ravel() >= 22)
assert_equal(b._data, [30, 40, 50, 60])
assert_equal(b._mask, [1, 1, 0, 0])
#
x = np.array([3, 1, 2])
b = a.compress(x >= 2, axis=1)
assert_equal(b._data, [[10, 30], [40, 60]])
assert_equal(b._mask, [[0, 1], [1, 0]])
def test_compressed(self):
"Tests compressed"
a = array([1, 2, 3, 4], mask=[0, 0, 0, 0])
b = a.compressed()
assert_equal(b, a)
a[0] = masked
b = a.compressed()
assert_equal(b, [2, 3, 4])
#
a = array(np.matrix([1, 2, 3, 4]), mask=[0, 0, 0, 0])
b = a.compressed()
assert_equal(b, a)
self.assertTrue(isinstance(b, np.matrix))
a[0, 0] = masked
b = a.compressed()
assert_equal(b, [[2, 3, 4]])
def test_empty(self):
"Tests empty/like"
datatype = [('a', int), ('b', float), ('c', '|S8')]
a = masked_array([(1, 1.1, '1.1'), (2, 2.2, '2.2'), (3, 3.3, '3.3')],
dtype=datatype)
assert_equal(len(a.fill_value.item()), len(datatype))
#
b = empty_like(a)
assert_equal(b.shape, a.shape)
assert_equal(b.fill_value, a.fill_value)
#
b = empty(len(a), dtype=datatype)
assert_equal(b.shape, a.shape)
assert_equal(b.fill_value, a.fill_value)
def test_put(self):
"Tests put."
d = arange(5)
n = [0, 0, 0, 1, 1]
m = make_mask(n)
x = array(d, mask=m)
self.assertTrue(x[3] is masked)
self.assertTrue(x[4] is masked)
x[[1, 4]] = [10, 40]
#self.assertTrue(x.mask is not m)
self.assertTrue(x[3] is masked)
self.assertTrue(x[4] is not masked)
assert_equal(x, [0, 10, 2, -1, 40])
#
x = masked_array(arange(10), mask=[1, 0, 0, 0, 0] * 2)
i = [0, 2, 4, 6]
x.put(i, [6, 4, 2, 0])
assert_equal(x, asarray([6, 1, 4, 3, 2, 5, 0, 7, 8, 9, ]))
assert_equal(x.mask, [0, 0, 0, 0, 0, 1, 0, 0, 0, 0])
x.put(i, masked_array([0, 2, 4, 6], [1, 0, 1, 0]))
assert_array_equal(x, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ])
assert_equal(x.mask, [1, 0, 0, 0, 1, 1, 0, 0, 0, 0])
#
x = masked_array(arange(10), mask=[1, 0, 0, 0, 0] * 2)
put(x, i, [6, 4, 2, 0])
assert_equal(x, asarray([6, 1, 4, 3, 2, 5, 0, 7, 8, 9, ]))
assert_equal(x.mask, [0, 0, 0, 0, 0, 1, 0, 0, 0, 0])
put(x, i, masked_array([0, 2, 4, 6], [1, 0, 1, 0]))
assert_array_equal(x, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ])
assert_equal(x.mask, [1, 0, 0, 0, 1, 1, 0, 0, 0, 0])
def test_put_hardmask(self):
"Tests put on hardmask"
d = arange(5)
n = [0, 0, 0, 1, 1]
m = make_mask(n)
xh = array(d + 1, mask=m, hard_mask=True, copy=True)
xh.put([4, 2, 0, 1, 3], [1, 2, 3, 4, 5])
assert_equal(xh._data, [3, 4, 2, 4, 5])
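    def _hardMaskSketch(self):
        # Illustrative helper, not an original test: with hard_mask=True,
        # assignments that target masked slots are silently discarded, which
        # is why the masked positions in the test above keep their original
        # data.
        xh = array([1, 2], mask=[0, 1], hard_mask=True)
        xh[1] = 99  # ignored: slot 1 stays masked with its original data
        return xh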
def test_putmask(self):
x = arange(6) + 1
mx = array(x, mask=[0, 0, 0, 1, 1, 1])
mask = [0, 0, 1, 0, 0, 1]
# w/o mask, w/o masked values
xx = x.copy()
putmask(xx, mask, 99)
assert_equal(xx, [1, 2, 99, 4, 5, 99])
# w/ mask, w/o masked values
mxx = mx.copy()
putmask(mxx, mask, 99)
assert_equal(mxx._data, [1, 2, 99, 4, 5, 99])
assert_equal(mxx._mask, [0, 0, 0, 1, 1, 0])
# w/o mask, w/ masked values
values = array([10, 20, 30, 40, 50, 60], mask=[1, 1, 1, 0, 0, 0])
xx = x.copy()
putmask(xx, mask, values)
assert_equal(xx._data, [1, 2, 30, 4, 5, 60])
assert_equal(xx._mask, [0, 0, 1, 0, 0, 0])
# w/ mask, w/ masked values
mxx = mx.copy()
putmask(mxx, mask, values)
assert_equal(mxx._data, [1, 2, 30, 4, 5, 60])
assert_equal(mxx._mask, [0, 0, 1, 1, 1, 0])
# w/ mask, w/ masked values + hardmask
mxx = mx.copy()
mxx.harden_mask()
putmask(mxx, mask, values)
assert_equal(mxx, [1, 2, 30, 4, 5, 60])
def test_ravel(self):
"Tests ravel"
a = array([[1, 2, 3, 4, 5]], mask=[[0, 1, 0, 0, 0]])
aravel = a.ravel()
assert_equal(a._mask.shape, a.shape)
a = array([0, 0], mask=[1, 1])
aravel = a.ravel()
assert_equal(a._mask.shape, a.shape)
a = array(np.matrix([1, 2, 3, 4, 5]), mask=[[0, 1, 0, 0, 0]])
aravel = a.ravel()
assert_equal(a.shape, (1, 5))
assert_equal(a._mask.shape, a.shape)
        # Checks that the mask is preserved when shrink=False
a = array([1, 2, 3, 4], mask=[0, 0, 0, 0], shrink=False)
assert_equal(a.ravel()._mask, [0, 0, 0, 0])
# Test that the fill_value is preserved
a.fill_value = -99
a.shape = (2, 2)
ar = a.ravel()
assert_equal(ar._mask, [0, 0, 0, 0])
assert_equal(ar._data, [1, 2, 3, 4])
assert_equal(ar.fill_value, -99)
def test_reshape(self):
"Tests reshape"
x = arange(4)
x[0] = masked
y = x.reshape(2, 2)
assert_equal(y.shape, (2, 2,))
assert_equal(y._mask.shape, (2, 2,))
assert_equal(x.shape, (4,))
assert_equal(x._mask.shape, (4,))
def test_sort(self):
"Test sort"
x = array([1, 4, 2, 3], mask=[0, 1, 0, 0], dtype=np.uint8)
#
sortedx = sort(x)
assert_equal(sortedx._data, [1, 2, 3, 4])
assert_equal(sortedx._mask, [0, 0, 0, 1])
#
sortedx = sort(x, endwith=False)
assert_equal(sortedx._data, [4, 1, 2, 3])
assert_equal(sortedx._mask, [1, 0, 0, 0])
#
x.sort()
assert_equal(x._data, [1, 2, 3, 4])
assert_equal(x._mask, [0, 0, 0, 1])
#
x = array([1, 4, 2, 3], mask=[0, 1, 0, 0], dtype=np.uint8)
x.sort(endwith=False)
assert_equal(x._data, [4, 1, 2, 3])
assert_equal(x._mask, [1, 0, 0, 0])
#
x = [1, 4, 2, 3]
sortedx = sort(x)
        self.assertTrue(not isinstance(sortedx, MaskedArray))
#
x = array([0, 1, -1, -2, 2], mask=nomask, dtype=np.int8)
sortedx = sort(x, endwith=False)
assert_equal(sortedx._data, [-2, -1, 0, 1, 2])
x = array([0, 1, -1, -2, 2], mask=[0, 1, 0, 0, 1], dtype=np.int8)
sortedx = sort(x, endwith=False)
assert_equal(sortedx._data, [1, 2, -2, -1, 0])
assert_equal(sortedx._mask, [1, 1, 0, 0, 0])
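    def _endwithSketch(self):
        # Illustrative helper, not an original test: endwith=False moves the
        # masked entries to the front of the sorted result instead of the
        # back, as the assertions above demonstrate.
        x = array([1, 4, 2, 3], mask=[0, 1, 0, 0])
        return sort(x, endwith=False)  # masked slot first, then 1, 2, 3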
def test_sort_2d(self):
"Check sort of 2D array."
# 2D array w/o mask
a = masked_array([[8, 4, 1], [2, 0, 9]])
a.sort(0)
assert_equal(a, [[2, 0, 1], [8, 4, 9]])
a = masked_array([[8, 4, 1], [2, 0, 9]])
a.sort(1)
assert_equal(a, [[1, 4, 8], [0, 2, 9]])
# 2D array w/mask
a = masked_array([[8, 4, 1], [2, 0, 9]], mask=[[1, 0, 0], [0, 0, 1]])
a.sort(0)
assert_equal(a, [[2, 0, 1], [8, 4, 9]])
assert_equal(a._mask, [[0, 0, 0], [1, 0, 1]])
a = masked_array([[8, 4, 1], [2, 0, 9]], mask=[[1, 0, 0], [0, 0, 1]])
a.sort(1)
assert_equal(a, [[1, 4, 8], [0, 2, 9]])
assert_equal(a._mask, [[0, 0, 1], [0, 0, 1]])
# 3D
a = masked_array([[[7, 8, 9], [4, 5, 6], [1, 2, 3]],
[[1, 2, 3], [7, 8, 9], [4, 5, 6]],
[[7, 8, 9], [1, 2, 3], [4, 5, 6]],
[[4, 5, 6], [1, 2, 3], [7, 8, 9]]])
a[a % 4 == 0] = masked
am = a.copy()
an = a.filled(99)
am.sort(0)
an.sort(0)
assert_equal(am, an)
am = a.copy()
an = a.filled(99)
am.sort(1)
an.sort(1)
assert_equal(am, an)
am = a.copy()
an = a.filled(99)
am.sort(2)
an.sort(2)
assert_equal(am, an)
def test_sort_flexible(self):
"Test sort on flexible dtype."
a = array([(3, 3), (3, 2), (2, 2), (2, 1), (1, 0), (1, 1), (1, 2)],
mask=[(0, 0), (0, 1), (0, 0), (0, 0), (1, 0), (0, 0), (0, 0)],
dtype=[('A', int), ('B', int)])
#
test = sort(a)
b = array([(1, 1), (1, 2), (2, 1), (2, 2), (3, 3), (3, 2), (1, 0)],
mask=[(0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 1), (1, 0)],
dtype=[('A', int), ('B', int)])
assert_equal(test, b)
assert_equal(test.mask, b.mask)
#
test = sort(a, endwith=False)
b = array([(1, 0), (1, 1), (1, 2), (2, 1), (2, 2), (3, 2), (3, 3), ],
mask=[(1, 0), (0, 0), (0, 0), (0, 0), (0, 0), (0, 1), (0, 0), ],
dtype=[('A', int), ('B', int)])
assert_equal(test, b)
assert_equal(test.mask, b.mask)
def test_argsort(self):
"Test argsort"
a = array([1, 5, 2, 4, 3], mask=[1, 0, 0, 1, 0])
assert_equal(np.argsort(a), argsort(a))
def test_squeeze(self):
"Check squeeze"
data = masked_array([[1, 2, 3]])
assert_equal(data.squeeze(), [1, 2, 3])
data = masked_array([[1, 2, 3]], mask=[[1, 1, 1]])
assert_equal(data.squeeze(), [1, 2, 3])
assert_equal(data.squeeze()._mask, [1, 1, 1])
data = masked_array([[1]], mask=True)
self.assertTrue(data.squeeze() is masked)
def test_swapaxes(self):
"Tests swapaxes on MaskedArrays."
x = np.array([ 8.375, 7.545, 8.828, 8.5 , 1.757, 5.928,
8.43 , 7.78 , 9.865, 5.878, 8.979, 4.732,
3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
6.04 , 9.63 , 7.712, 3.382, 4.489, 6.479,
7.189, 9.645, 5.395, 4.961, 9.894, 2.893,
7.357, 9.828, 6.272, 3.758, 6.693, 0.993])
m = np.array([0, 1, 0, 1, 0, 0,
1, 0, 1, 1, 0, 1,
0, 0, 0, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 1, 0])
mX = array(x, mask=m).reshape(6, 6)
mXX = mX.reshape(3, 2, 2, 3)
#
mXswapped = mX.swapaxes(0, 1)
assert_equal(mXswapped[-1], mX[:, -1])
mXXswapped = mXX.swapaxes(0, 2)
assert_equal(mXXswapped.shape, (2, 2, 3, 3))
def test_take(self):
"Tests take"
x = masked_array([10, 20, 30, 40], [0, 1, 0, 1])
assert_equal(x.take([0, 0, 3]), masked_array([10, 10, 40], [0, 0, 1]))
assert_equal(x.take([0, 0, 3]), x[[0, 0, 3]])
assert_equal(x.take([[0, 1], [0, 1]]),
masked_array([[10, 20], [10, 20]], [[0, 1], [0, 1]]))
#
x = array([[10, 20, 30], [40, 50, 60]], mask=[[0, 0, 1], [1, 0, 0, ]])
assert_equal(x.take([0, 2], axis=1),
array([[10, 30], [40, 60]], mask=[[0, 1], [1, 0]]))
assert_equal(take(x, [0, 2], axis=1),
array([[10, 30], [40, 60]], mask=[[0, 1], [1, 0]]))
def test_take_masked_indices(self):
"Test take w/ masked indices"
a = np.array((40, 18, 37, 9, 22))
indices = np.arange(3)[None, :] + np.arange(5)[:, None]
mindices = array(indices, mask=(indices >= len(a)))
# No mask
test = take(a, mindices, mode='clip')
ctrl = array([[40, 18, 37],
[18, 37, 9],
[37, 9, 22],
[ 9, 22, 22],
[22, 22, 22]])
assert_equal(test, ctrl)
# Masked indices
test = take(a, mindices)
ctrl = array([[40, 18, 37],
[18, 37, 9],
[37, 9, 22],
[ 9, 22, 40],
[22, 40, 40]])
ctrl[3, 2] = ctrl[4, 1] = ctrl[4, 2] = masked
assert_equal(test, ctrl)
assert_equal(test.mask, ctrl.mask)
# Masked input + masked indices
a = array((40, 18, 37, 9, 22), mask=(0, 1, 0, 0, 0))
test = take(a, mindices)
ctrl[0, 1] = ctrl[1, 0] = masked
assert_equal(test, ctrl)
assert_equal(test.mask, ctrl.mask)
def test_tolist(self):
"Tests to list"
# ... on 1D
x = array(np.arange(12))
x[[1, -2]] = masked
xlist = x.tolist()
self.assertTrue(xlist[1] is None)
self.assertTrue(xlist[-2] is None)
# ... on 2D
x.shape = (3, 4)
xlist = x.tolist()
ctrl = [[0, None, 2, 3], [4, 5, 6, 7], [8, 9, None, 11]]
assert_equal(xlist[0], [0, None, 2, 3])
assert_equal(xlist[1], [4, 5, 6, 7])
assert_equal(xlist[2], [8, 9, None, 11])
assert_equal(xlist, ctrl)
# ... on structured array w/ masked records
x = array(zip([1, 2, 3],
[1.1, 2.2, 3.3],
['one', 'two', 'thr']),
dtype=[('a', int), ('b', float), ('c', '|S8')])
x[-1] = masked
assert_equal(x.tolist(),
[(1, 1.1, asbytes('one')),
(2, 2.2, asbytes('two')),
(None, None, None)])
# ... on structured array w/ masked fields
a = array([(1, 2,), (3, 4)], mask=[(0, 1), (0, 0)],
dtype=[('a', int), ('b', int)])
test = a.tolist()
assert_equal(test, [[1, None], [3, 4]])
# ... on mvoid
a = a[0]
test = a.tolist()
assert_equal(test, [1, None])
def test_tolist_specialcase(self):
"Test mvoid.tolist: make sure we return a standard Python object"
a = array([(0, 1), (2, 3)], dtype=[('a', int), ('b', int)])
# w/o mask: each entry is a np.void whose elements are standard Python
for entry in a:
for item in entry.tolist():
assert(not isinstance(item, np.generic))
# w/ mask: each entry is a ma.void whose elements should be standard Python
a.mask[0] = (0, 1)
for entry in a:
for item in entry.tolist():
assert(not isinstance(item, np.generic))
def test_toflex(self):
"Test the conversion to records"
data = arange(10)
record = data.toflex()
assert_equal(record['_data'], data._data)
assert_equal(record['_mask'], data._mask)
#
data[[0, 1, 2, -1]] = masked
record = data.toflex()
assert_equal(record['_data'], data._data)
assert_equal(record['_mask'], data._mask)
#
ndtype = [('i', int), ('s', '|S3'), ('f', float)]
data = array([(i, s, f) for (i, s, f) in zip(np.arange(10),
'ABCDEFGHIJKLM',
arand(10))],
dtype=ndtype)
data[[0, 1, 2, -1]] = masked
record = data.toflex()
assert_equal(record['_data'], data._data)
assert_equal(record['_mask'], data._mask)
#
ndtype = np.dtype("int, (2,3)float, float")
data = array([(i, f, ff) for (i, f, ff) in zip(np.arange(10),
arand(10),
arand(10))],
dtype=ndtype)
data[[0, 1, 2, -1]] = masked
record = data.toflex()
assert_equal_records(record['_data'], data._data)
assert_equal_records(record['_mask'], data._mask)
def test_fromflex(self):
"Test the reconstruction of a masked_array from a record"
a = array([1, 2, 3])
test = fromflex(a.toflex())
assert_equal(test, a)
assert_equal(test.mask, a.mask)
#
a = array([1, 2, 3], mask=[0, 0, 1])
test = fromflex(a.toflex())
assert_equal(test, a)
assert_equal(test.mask, a.mask)
#
a = array([(1, 1.), (2, 2.), (3, 3.)], mask=[(1, 0), (0, 0), (0, 1)],
dtype=[('A', int), ('B', float)])
test = fromflex(a.toflex())
assert_equal(test, a)
assert_equal(test.data, a.data)
def test_arraymethod(self):
"Test a _arraymethod w/ n argument"
marray = masked_array([[1, 2, 3, 4, 5]], mask=[0, 0, 1, 0, 0])
control = masked_array([[1], [2], [3], [4], [5]],
mask=[0, 0, 1, 0, 0])
assert_equal(marray.T, control)
assert_equal(marray.transpose(), control)
#
assert_equal(MaskedArray.cumsum(marray.T, 0), control.cumsum(0))
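# A minimal standalone sketch, not part of the original suite (it assumes the
# same numpy.ma imports used above): the cumsum/cumprod tests in the next
# class rely on the convention that masked slots contribute the identity
# element -- 0 for sums, 1 for products -- to the running result.
def _demo_masked_cumsum():
    data = masked_array([1, 2, 3, 4], mask=[0, 1, 0, 0])
    # The masked "2" is treated as 0, so the data accumulate as 1, 1, 4, 8,
    # while the mask of the result still flags the second slot.
    return data.cumsum()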
#------------------------------------------------------------------------------
class TestMaskedArrayMathMethods(TestCase):
def setUp(self):
"Base data definition."
x = np.array([ 8.375, 7.545, 8.828, 8.5 , 1.757, 5.928,
8.43 , 7.78 , 9.865, 5.878, 8.979, 4.732,
3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
6.04 , 9.63 , 7.712, 3.382, 4.489, 6.479,
7.189, 9.645, 5.395, 4.961, 9.894, 2.893,
7.357, 9.828, 6.272, 3.758, 6.693, 0.993])
X = x.reshape(6, 6)
XX = x.reshape(3, 2, 2, 3)
m = np.array([0, 1, 0, 1, 0, 0,
1, 0, 1, 1, 0, 1,
0, 0, 0, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 1, 0])
mx = array(data=x, mask=m)
mX = array(data=X, mask=m.reshape(X.shape))
mXX = array(data=XX, mask=m.reshape(XX.shape))
m2 = np.array([1, 1, 0, 1, 0, 0,
1, 1, 1, 1, 0, 1,
0, 0, 1, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 1, 0,
0, 0, 1, 0, 1, 1])
m2x = array(data=x, mask=m2)
m2X = array(data=X, mask=m2.reshape(X.shape))
m2XX = array(data=XX, mask=m2.reshape(XX.shape))
self.d = (x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX)
def test_cumsumprod(self):
"Tests cumsum & cumprod on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
mXcp = mX.cumsum(0)
assert_equal(mXcp._data, mX.filled(0).cumsum(0))
mXcp = mX.cumsum(1)
assert_equal(mXcp._data, mX.filled(0).cumsum(1))
#
mXcp = mX.cumprod(0)
assert_equal(mXcp._data, mX.filled(1).cumprod(0))
mXcp = mX.cumprod(1)
assert_equal(mXcp._data, mX.filled(1).cumprod(1))
def test_cumsumprod_with_output(self):
"Tests cumsum/cumprod w/ output"
xm = array(uniform(0, 10, 12)).reshape(3, 4)
xm[:, 0] = xm[0] = xm[-1, -1] = masked
#
for funcname in ('cumsum', 'cumprod'):
npfunc = getattr(np, funcname)
xmmeth = getattr(xm, funcname)
# A ndarray as explicit input
output = np.empty((3, 4), dtype=float)
output.fill(-9999)
result = npfunc(xm, axis=0, out=output)
# ... the result should be the given output
self.assertTrue(result is output)
assert_equal(result, xmmeth(axis=0, out=output))
#
output = empty((3, 4), dtype=int)
result = xmmeth(axis=0, out=output)
self.assertTrue(result is output)
def test_ptp(self):
"Tests ptp on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
(n, m) = X.shape
assert_equal(mx.ptp(), mx.compressed().ptp())
rows = np.zeros(n, np.float)
cols = np.zeros(m, np.float)
for k in range(m):
cols[k] = mX[:, k].compressed().ptp()
for k in range(n):
rows[k] = mX[k].compressed().ptp()
assert_equal(mX.ptp(0), cols)
assert_equal(mX.ptp(1), rows)
def test_sum_object(self):
"Test sum on object dtype"
a = masked_array([1, 2, 3], mask=[1, 0, 0], dtype=np.object)
assert_equal(a.sum(), 5)
a = masked_array([[1, 2, 3], [4, 5, 6]], dtype=object)
assert_equal(a.sum(axis=0), [5, 7, 9])
def test_prod_object(self):
"Test prod on object dtype"
a = masked_array([1, 2, 3], mask=[1, 0, 0], dtype=np.object)
assert_equal(a.prod(), 2 * 3)
a = masked_array([[1, 2, 3], [4, 5, 6]], dtype=object)
assert_equal(a.prod(axis=0), [4, 10, 18])
def test_meananom_object(self):
"Test mean/anom on object dtype"
a = masked_array([1, 2, 3], dtype=np.object)
assert_equal(a.mean(), 2)
assert_equal(a.anom(), [-1, 0, 1])
def test_trace(self):
"Tests trace on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
mXdiag = mX.diagonal()
assert_equal(mX.trace(), mX.diagonal().compressed().sum())
assert_almost_equal(mX.trace(),
X.trace() - sum(mXdiag.mask * X.diagonal(), axis=0))
def test_varstd(self):
"Tests var & std on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
assert_almost_equal(mX.var(axis=None), mX.compressed().var())
assert_almost_equal(mX.std(axis=None), mX.compressed().std())
assert_almost_equal(mX.std(axis=None, ddof=1),
mX.compressed().std(ddof=1))
assert_almost_equal(mX.var(axis=None, ddof=1),
mX.compressed().var(ddof=1))
assert_equal(mXX.var(axis=3).shape, XX.var(axis=3).shape)
assert_equal(mX.var().shape, X.var().shape)
(mXvar0, mXvar1) = (mX.var(axis=0), mX.var(axis=1))
assert_almost_equal(mX.var(axis=None, ddof=2), mX.compressed().var(ddof=2))
assert_almost_equal(mX.std(axis=None, ddof=2), mX.compressed().std(ddof=2))
for k in range(6):
assert_almost_equal(mXvar1[k], mX[k].compressed().var())
assert_almost_equal(mXvar0[k], mX[:, k].compressed().var())
assert_almost_equal(np.sqrt(mXvar0[k]), mX[:, k].compressed().std())
def test_varstd_specialcases(self):
"Test a special case for var"
nout = np.empty(1, dtype=float)
mout = empty(1, dtype=float)
#
x = array(arange(10), mask=True)
for methodname in ('var', 'std'):
method = getattr(x, methodname)
self.assertTrue(method() is masked)
self.assertTrue(method(0) is masked)
self.assertTrue(method(-1) is masked)
# Using a masked array as explicit output
_ = method(out=mout)
self.assertTrue(mout is not masked)
assert_equal(mout.mask, True)
# Using a ndarray as explicit output
_ = method(out=nout)
self.assertTrue(np.isnan(nout))
#
x = array(arange(10), mask=True)
x[-1] = 9
for methodname in ('var', 'std'):
method = getattr(x, methodname)
self.assertTrue(method(ddof=1) is masked)
self.assertTrue(method(0, ddof=1) is masked)
self.assertTrue(method(-1, ddof=1) is masked)
# Using a masked array as explicit output
_ = method(out=mout, ddof=1)
self.assertTrue(mout is not masked)
assert_equal(mout.mask, True)
# Using a ndarray as explicit output
_ = method(out=nout, ddof=1)
self.assertTrue(np.isnan(nout))
def test_varstd_ddof(self):
a = array([[1, 1, 0], [1, 1, 0]], mask=[[0, 0, 1], [0, 0, 1]])
test = a.std(axis=0, ddof=0)
assert_equal(test.filled(0), [0, 0, 0])
assert_equal(test.mask, [0, 0, 1])
test = a.std(axis=0, ddof=1)
assert_equal(test.filled(0), [0, 0, 0])
assert_equal(test.mask, [0, 0, 1])
test = a.std(axis=0, ddof=2)
assert_equal(test.filled(0), [0, 0, 0])
assert_equal(test.mask, [1, 1, 1])
def test_diag(self):
"Test diag"
x = arange(9).reshape((3, 3))
x[1, 1] = masked
out = np.diag(x)
assert_equal(out, [0, 4, 8])
out = diag(x)
assert_equal(out, [0, 4, 8])
assert_equal(out.mask, [0, 1, 0])
out = diag(out)
control = array([[0, 0, 0], [0, 4, 0], [0, 0, 8]],
mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
assert_equal(out, control)
def test_axis_methods_nomask(self):
"Test the combination nomask & methods w/ axis"
a = array([[1, 2, 3], [4, 5, 6]])
#
assert_equal(a.sum(0), [5, 7, 9])
assert_equal(a.sum(-1), [6, 15])
assert_equal(a.sum(1), [6, 15])
#
assert_equal(a.prod(0), [4, 10, 18])
assert_equal(a.prod(-1), [6, 120])
assert_equal(a.prod(1), [6, 120])
#
assert_equal(a.min(0), [1, 2, 3])
assert_equal(a.min(-1), [1, 4])
assert_equal(a.min(1), [1, 4])
#
assert_equal(a.max(0), [4, 5, 6])
assert_equal(a.max(-1), [3, 6])
assert_equal(a.max(1), [3, 6])
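# Hedged standalone sketch, not an original test (assumes the module-level
# numpy.ma imports above): the var/std tests compare against .compressed(),
# i.e. the statistics are computed over the unmasked entries only.
def _demo_masked_var():
    a = masked_array([1.0, 2.0, 3.0, 100.0], mask=[0, 0, 0, 1])
    # Both expressions below ignore the masked 100.0 and agree with
    # np.var([1.0, 2.0, 3.0]).
    return a.var(), a.compressed().var()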
#------------------------------------------------------------------------------
class TestMaskedArrayMathMethodsComplex(TestCase):
"Test class for miscellaneous MaskedArrays methods."
def setUp(self):
"Base data definition."
x = np.array([ 8.375j, 7.545j, 8.828j, 8.5j , 1.757j, 5.928,
8.43 , 7.78 , 9.865, 5.878, 8.979, 4.732,
3.012, 6.022, 5.095, 3.116, 5.238, 3.957,
6.04 , 9.63 , 7.712, 3.382, 4.489, 6.479j,
7.189j, 9.645, 5.395, 4.961, 9.894, 2.893,
7.357, 9.828, 6.272, 3.758, 6.693, 0.993j])
X = x.reshape(6, 6)
XX = x.reshape(3, 2, 2, 3)
m = np.array([0, 1, 0, 1, 0, 0,
1, 0, 1, 1, 0, 1,
0, 0, 0, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 0, 0,
0, 0, 1, 0, 1, 0])
mx = array(data=x, mask=m)
mX = array(data=X, mask=m.reshape(X.shape))
mXX = array(data=XX, mask=m.reshape(XX.shape))
m2 = np.array([1, 1, 0, 1, 0, 0,
1, 1, 1, 1, 0, 1,
0, 0, 1, 1, 0, 1,
0, 0, 0, 1, 1, 1,
1, 0, 0, 1, 1, 0,
0, 0, 1, 0, 1, 1])
m2x = array(data=x, mask=m2)
m2X = array(data=X, mask=m2.reshape(X.shape))
m2XX = array(data=XX, mask=m2.reshape(XX.shape))
self.d = (x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX)
def test_varstd(self):
"Tests var & std on MaskedArrays."
(x, X, XX, m, mx, mX, mXX, m2x, m2X, m2XX) = self.d
assert_almost_equal(mX.var(axis=None), mX.compressed().var())
assert_almost_equal(mX.std(axis=None), mX.compressed().std())
assert_equal(mXX.var(axis=3).shape, XX.var(axis=3).shape)
assert_equal(mX.var().shape, X.var().shape)
(mXvar0, mXvar1) = (mX.var(axis=0), mX.var(axis=1))
assert_almost_equal(mX.var(axis=None, ddof=2), mX.compressed().var(ddof=2))
assert_almost_equal(mX.std(axis=None, ddof=2), mX.compressed().std(ddof=2))
for k in range(6):
assert_almost_equal(mXvar1[k], mX[k].compressed().var())
assert_almost_equal(mXvar0[k], mX[:, k].compressed().var())
assert_almost_equal(np.sqrt(mXvar0[k]), mX[:, k].compressed().std())
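# Hedged standalone sketch, not an original test (assumes the imports above):
# masked_where, exercised throughout the next class, builds a masked array
# directly from a boolean condition.
def _demo_masked_where():
    x = array([1., 2., 3., 4.])
    return masked_where(x > 2., x)  # the entries 3. and 4. come back masked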
#------------------------------------------------------------------------------
class TestMaskedArrayFunctions(TestCase):
"Test class for miscellaneous functions."
def setUp(self):
x = np.array([1., 1., 1., -2., pi / 2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
        m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = masked_array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
xm.set_fill_value(1e+20)
self.info = (xm, ym)
def test_masked_where_bool(self):
x = [1, 2]
y = masked_where(False, x)
assert_equal(y, [1, 2])
assert_equal(y[1], 2)
def test_masked_equal_wlist(self):
x = [1, 2, 3]
mx = masked_equal(x, 3)
assert_equal(mx, x)
assert_equal(mx._mask, [0, 0, 1])
mx = masked_not_equal(x, 3)
assert_equal(mx, x)
assert_equal(mx._mask, [1, 1, 0])
def test_masked_equal_fill_value(self):
x = [1, 2, 3]
mx = masked_equal(x, 3)
assert_equal(mx._mask, [0, 0, 1])
assert_equal(mx.fill_value, 3)
def test_masked_where_condition(self):
"Tests masking functions."
x = array([1., 2., 3., 4., 5.])
x[2] = masked
assert_equal(masked_where(greater(x, 2), x), masked_greater(x, 2))
assert_equal(masked_where(greater_equal(x, 2), x), masked_greater_equal(x, 2))
assert_equal(masked_where(less(x, 2), x), masked_less(x, 2))
assert_equal(masked_where(less_equal(x, 2), x), masked_less_equal(x, 2))
assert_equal(masked_where(not_equal(x, 2), x), masked_not_equal(x, 2))
assert_equal(masked_where(equal(x, 2), x), masked_equal(x, 2))
assert_equal(masked_where(not_equal(x, 2), x), masked_not_equal(x, 2))
assert_equal(masked_where([1, 1, 0, 0, 0], [1, 2, 3, 4, 5]), [99, 99, 3, 4, 5])
def test_masked_where_oddities(self):
"""Tests some generic features."""
atest = ones((10, 10, 10), dtype=float)
btest = zeros(atest.shape, MaskType)
ctest = masked_where(btest, atest)
assert_equal(atest, ctest)
def test_masked_where_shape_constraint(self):
a = arange(10)
try:
test = masked_equal(1, a)
except IndexError:
pass
else:
raise AssertionError("Should have failed...")
test = masked_equal(a, 1)
assert_equal(test.mask, [0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
def test_masked_otherfunctions(self):
assert_equal(masked_inside(range(5), 1, 3), [0, 199, 199, 199, 4])
assert_equal(masked_outside(range(5), 1, 3), [199, 1, 2, 3, 199])
assert_equal(masked_inside(array(range(5), mask=[1, 0, 0, 0, 0]), 1, 3).mask, [1, 1, 1, 1, 0])
assert_equal(masked_outside(array(range(5), mask=[0, 1, 0, 0, 0]), 1, 3).mask, [1, 1, 0, 0, 1])
assert_equal(masked_equal(array(range(5), mask=[1, 0, 0, 0, 0]), 2).mask, [1, 0, 1, 0, 0])
assert_equal(masked_not_equal(array([2, 2, 1, 2, 1], mask=[1, 0, 0, 0, 0]), 2).mask, [1, 0, 1, 0, 1])
def test_round(self):
a = array([1.23456, 2.34567, 3.45678, 4.56789, 5.67890],
mask=[0, 1, 0, 0, 0])
assert_equal(a.round(), [1., 2., 3., 5., 6.])
assert_equal(a.round(1), [1.2, 2.3, 3.5, 4.6, 5.7])
assert_equal(a.round(3), [1.235, 2.346, 3.457, 4.568, 5.679])
b = empty_like(a)
a.round(out=b)
assert_equal(b, [1., 2., 3., 5., 6.])
x = array([1., 2., 3., 4., 5.])
c = array([1, 1, 1, 0, 0])
x[2] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
c[0] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
assert z[0] is masked
assert z[1] is not masked
assert z[2] is masked
def test_round_with_output(self):
"Testing round with an explicit output"
xm = array(uniform(0, 10, 12)).reshape(3, 4)
xm[:, 0] = xm[0] = xm[-1, -1] = masked
# A ndarray as explicit input
output = np.empty((3, 4), dtype=float)
output.fill(-9999)
result = np.round(xm, decimals=2, out=output)
# ... the result should be the given output
self.assertTrue(result is output)
assert_equal(result, xm.round(decimals=2, out=output))
#
output = empty((3, 4), dtype=float)
result = xm.round(decimals=2, out=output)
self.assertTrue(result is output)
def test_identity(self):
a = identity(5)
self.assertTrue(isinstance(a, MaskedArray))
assert_equal(a, np.identity(5))
def test_power(self):
x = -1.1
assert_almost_equal(power(x, 2.), 1.21)
self.assertTrue(power(x, masked) is masked)
x = array([-1.1, -1.1, 1.1, 1.1, 0.])
b = array([0.5, 2., 0.5, 2., -1.], mask=[0, 0, 0, 0, 1])
y = power(x, b)
assert_almost_equal(y, [0, 1.21, 1.04880884817, 1.21, 0.])
assert_equal(y._mask, [1, 0, 0, 0, 1])
b.mask = nomask
y = power(x, b)
assert_equal(y._mask, [1, 0, 0, 0, 1])
z = x ** b
assert_equal(z._mask, y._mask)
assert_almost_equal(z, y)
assert_almost_equal(z._data, y._data)
x **= b
assert_equal(x._mask, y._mask)
assert_almost_equal(x, y)
assert_almost_equal(x._data, y._data)
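    def _powerMaskingSketch(self):
        # Illustrative helper, not an original test: ma.power masks outputs
        # that would be invalid, e.g. a negative base raised to a fractional
        # exponent, rather than returning a nan.
        return power(array([-1.1, 1.1]), 0.5)  # first slot comes back masked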
def test_power_w_broadcasting(self):
"Test power w/ broadcasting"
a2 = np.array([[1., 2., 3.], [4., 5., 6.]])
a2m = array(a2, mask=[[1, 0, 0], [0, 0, 1]])
b1 = np.array([2, 4, 3])
b1m = array(b1, mask=[0, 1, 0])
b2 = np.array([b1, b1])
b2m = array(b2, mask=[[0, 1, 0], [0, 1, 0]])
#
ctrl = array([[1 ** 2, 2 ** 4, 3 ** 3], [4 ** 2, 5 ** 4, 6 ** 3]],
mask=[[1, 1, 0], [0, 1, 1]])
# No broadcasting, base & exp w/ mask
test = a2m ** b2m
assert_equal(test, ctrl)
assert_equal(test.mask, ctrl.mask)
# No broadcasting, base w/ mask, exp w/o mask
test = a2m ** b2
assert_equal(test, ctrl)
assert_equal(test.mask, a2m.mask)
# No broadcasting, base w/o mask, exp w/ mask
test = a2 ** b2m
assert_equal(test, ctrl)
assert_equal(test.mask, b2m.mask)
#
ctrl = array([[2 ** 2, 4 ** 4, 3 ** 3], [2 ** 2, 4 ** 4, 3 ** 3]],
mask=[[0, 1, 0], [0, 1, 0]])
test = b1 ** b2m
assert_equal(test, ctrl)
assert_equal(test.mask, ctrl.mask)
test = b2m ** b1
assert_equal(test, ctrl)
assert_equal(test.mask, ctrl.mask)
def test_where(self):
"Test the where function"
x = np.array([1., 1., 1., -2., pi / 2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
        m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = masked_array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
xm.set_fill_value(1e+20)
#
d = where(xm > 2, xm, -9)
assert_equal(d, [-9., -9., -9., -9., -9., 4., -9., -9., 10., -9., -9., 3.])
assert_equal(d._mask, xm._mask)
d = where(xm > 2, -9, ym)
assert_equal(d, [5., 0., 3., 2., -1., -9., -9., -10., -9., 1., 0., -9.])
assert_equal(d._mask, [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0])
d = where(xm > 2, xm, masked)
assert_equal(d, [-9., -9., -9., -9., -9., 4., -9., -9., 10., -9., -9., 3.])
tmp = xm._mask.copy()
tmp[(xm <= 2).filled(True)] = True
assert_equal(d._mask, tmp)
#
ixm = xm.astype(int)
d = where(ixm > 2, ixm, masked)
assert_equal(d, [-9, -9, -9, -9, -9, 4, -9, -9, 10, -9, -9, 3])
assert_equal(d.dtype, ixm.dtype)
def test_where_with_masked_choice(self):
x = arange(10)
x[3] = masked
c = x >= 8
# Set False to masked
        z = where(c, x, masked)
assert z.dtype is x.dtype
assert z[3] is masked
assert z[4] is masked
assert z[7] is masked
assert z[8] is not masked
assert z[9] is not masked
assert_equal(x, z)
# Set True to masked
        z = where(c, masked, x)
assert z.dtype is x.dtype
assert z[3] is masked
assert z[4] is not masked
assert z[7] is not masked
assert z[8] is masked
assert z[9] is masked
def test_where_with_masked_condition(self):
x = array([1., 2., 3., 4., 5.])
c = array([1, 1, 1, 0, 0])
x[2] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
c[0] = masked
z = where(c, x, -x)
assert_equal(z, [1., 2., 0., -4., -5])
assert z[0] is masked
assert z[1] is not masked
assert z[2] is masked
#
x = arange(1, 6)
x[-1] = masked
y = arange(1, 6) * 10
y[2] = masked
c = array([1, 1, 1, 0, 0], mask=[1, 0, 0, 0, 0])
cm = c.filled(1)
z = where(c, x, y)
zm = where(cm, x, y)
assert_equal(z, zm)
assert getmask(zm) is nomask
assert_equal(zm, [1, 2, 3, 40, 50])
z = where(c, masked, 1)
assert_equal(z, [99, 99, 99, 1, 1])
z = where(c, 1, masked)
assert_equal(z, [99, 1, 1, 99, 99])
def test_where_type(self):
"Test the type conservation with where"
x = np.arange(4, dtype=np.int32)
y = np.arange(4, dtype=np.float32) * 2.2
test = where(x > 1.5, y, x).dtype
control = np.find_common_type([np.int32, np.float32], [])
assert_equal(test, control)
def test_choose(self):
"Test choose"
choices = [[0, 1, 2, 3], [10, 11, 12, 13],
[20, 21, 22, 23], [30, 31, 32, 33]]
chosen = choose([2, 3, 1, 0], choices)
assert_equal(chosen, array([20, 31, 12, 3]))
chosen = choose([2, 4, 1, 0], choices, mode='clip')
assert_equal(chosen, array([20, 31, 12, 3]))
chosen = choose([2, 4, 1, 0], choices, mode='wrap')
assert_equal(chosen, array([20, 1, 12, 3]))
# Check with some masked indices
indices_ = array([2, 4, 1, 0], mask=[1, 0, 0, 1])
chosen = choose(indices_, choices, mode='wrap')
assert_equal(chosen, array([99, 1, 12, 99]))
assert_equal(chosen.mask, [1, 0, 0, 1])
# Check with some masked choices
choices = array(choices, mask=[[0, 0, 0, 1], [1, 1, 0, 1],
[1, 0, 0, 0], [0, 0, 0, 0]])
indices_ = [2, 3, 1, 0]
chosen = choose(indices_, choices, mode='wrap')
assert_equal(chosen, array([20, 31, 12, 3]))
assert_equal(chosen.mask, [1, 0, 0, 1])
def test_choose_with_out(self):
"Test choose with an explicit out keyword"
choices = [[0, 1, 2, 3], [10, 11, 12, 13],
[20, 21, 22, 23], [30, 31, 32, 33]]
store = empty(4, dtype=int)
chosen = choose([2, 3, 1, 0], choices, out=store)
assert_equal(store, array([20, 31, 12, 3]))
self.assertTrue(store is chosen)
# Check with some masked indices + out
store = empty(4, dtype=int)
indices_ = array([2, 3, 1, 0], mask=[1, 0, 0, 1])
chosen = choose(indices_, choices, mode='wrap', out=store)
assert_equal(store, array([99, 31, 12, 99]))
assert_equal(store.mask, [1, 0, 0, 1])
        # Check with some masked choices + out in a ndarray
choices = array(choices, mask=[[0, 0, 0, 1], [1, 1, 0, 1],
[1, 0, 0, 0], [0, 0, 0, 0]])
indices_ = [2, 3, 1, 0]
store = empty(4, dtype=int).view(ndarray)
chosen = choose(indices_, choices, mode='wrap', out=store)
assert_equal(store, array([999999, 31, 12, 999999]))
def test_reshape(self):
a = arange(10)
a[0] = masked
# Try the default
b = a.reshape((5, 2))
assert_equal(b.shape, (5, 2))
self.assertTrue(b.flags['C'])
# Try w/ arguments as list instead of tuple
b = a.reshape(5, 2)
assert_equal(b.shape, (5, 2))
self.assertTrue(b.flags['C'])
# Try w/ order
b = a.reshape((5, 2), order='F')
assert_equal(b.shape, (5, 2))
self.assertTrue(b.flags['F'])
# Try w/ order
b = a.reshape(5, 2, order='F')
assert_equal(b.shape, (5, 2))
self.assertTrue(b.flags['F'])
#
c = np.reshape(a, (2, 5))
self.assertTrue(isinstance(c, MaskedArray))
assert_equal(c.shape, (2, 5))
self.assertTrue(c[0, 0] is masked)
self.assertTrue(c.flags['C'])
def test_make_mask_descr(self):
"Test make_mask_descr"
# Flexible
ntype = [('a', np.float), ('b', np.float)]
test = make_mask_descr(ntype)
assert_equal(test, [('a', np.bool), ('b', np.bool)])
# Standard w/ shape
ntype = (np.float, 2)
test = make_mask_descr(ntype)
assert_equal(test, (np.bool, 2))
# Standard standard
ntype = np.float
test = make_mask_descr(ntype)
assert_equal(test, np.dtype(np.bool))
# Nested
ntype = [('a', np.float), ('b', [('ba', np.float), ('bb', np.float)])]
test = make_mask_descr(ntype)
control = np.dtype([('a', 'b1'), ('b', [('ba', 'b1'), ('bb', 'b1')])])
assert_equal(test, control)
        # Named + shape
ntype = [('a', (np.float, 2))]
test = make_mask_descr(ntype)
assert_equal(test, np.dtype([('a', (np.bool, 2))]))
# 2 names
ntype = [(('A', 'a'), float)]
test = make_mask_descr(ntype)
assert_equal(test, np.dtype([(('A', 'a'), bool)]))
def test_make_mask(self):
"Test make_mask"
# w/ a list as an input
mask = [0, 1]
test = make_mask(mask)
assert_equal(test.dtype, MaskType)
assert_equal(test, [0, 1])
# w/ a ndarray as an input
mask = np.array([0, 1], dtype=np.bool)
test = make_mask(mask)
assert_equal(test.dtype, MaskType)
assert_equal(test, [0, 1])
# w/ a flexible-type ndarray as an input - use default
mdtype = [('a', np.bool), ('b', np.bool)]
mask = np.array([(0, 0), (0, 1)], dtype=mdtype)
test = make_mask(mask)
assert_equal(test.dtype, MaskType)
assert_equal(test, [1, 1])
# w/ a flexible-type ndarray as an input - use input dtype
mdtype = [('a', np.bool), ('b', np.bool)]
mask = np.array([(0, 0), (0, 1)], dtype=mdtype)
test = make_mask(mask, dtype=mask.dtype)
assert_equal(test.dtype, mdtype)
assert_equal(test, mask)
# w/ a flexible-type ndarray as an input - use input dtype
mdtype = [('a', np.float), ('b', np.float)]
bdtype = [('a', np.bool), ('b', np.bool)]
mask = np.array([(0, 0), (0, 1)], dtype=mdtype)
test = make_mask(mask, dtype=mask.dtype)
assert_equal(test.dtype, bdtype)
assert_equal(test, np.array([(0, 0), (0, 1)], dtype=bdtype))
def test_mask_or(self):
# Initialize
mtype = [('a', np.bool), ('b', np.bool)]
mask = np.array([(0, 0), (0, 1), (1, 0), (0, 0)], dtype=mtype)
# Test using nomask as input
test = mask_or(mask, nomask)
assert_equal(test, mask)
test = mask_or(nomask, mask)
assert_equal(test, mask)
# Using False as input
test = mask_or(mask, False)
assert_equal(test, mask)
# Using True as input. Won't work, but keep it for the kicks
# test = mask_or(mask, True)
# control = np.array([(1, 1), (1, 1), (1, 1), (1, 1)], dtype=mtype)
# assert_equal(test, control)
# Using another array w / the same dtype
other = np.array([(0, 1), (0, 1), (0, 1), (0, 1)], dtype=mtype)
test = mask_or(mask, other)
control = np.array([(0, 1), (0, 1), (1, 1), (0, 1)], dtype=mtype)
assert_equal(test, control)
# Using another array w / a different dtype
othertype = [('A', np.bool), ('B', np.bool)]
other = np.array([(0, 1), (0, 1), (0, 1), (0, 1)], dtype=othertype)
try:
test = mask_or(mask, other)
except ValueError:
pass
# Using nested arrays
dtype = [('a', np.bool), ('b', [('ba', np.bool), ('bb', np.bool)])]
amask = np.array([(0, (1, 0)), (0, (1, 0))], dtype=dtype)
bmask = np.array([(1, (0, 1)), (0, (0, 0))], dtype=dtype)
cntrl = np.array([(1, (1, 1)), (0, (1, 0))], dtype=dtype)
assert_equal(mask_or(amask, bmask), cntrl)
def test_flatten_mask(self):
"Tests flatten mask"
        # Standard dtype
mask = np.array([0, 0, 1], dtype=np.bool)
assert_equal(flatten_mask(mask), mask)
# Flexible dtype
mask = np.array([(0, 0), (0, 1)], dtype=[('a', bool), ('b', bool)])
test = flatten_mask(mask)
control = np.array([0, 0, 0, 1], dtype=bool)
assert_equal(test, control)
mdtype = [('a', bool), ('b', [('ba', bool), ('bb', bool)])]
data = [(0, (0, 0)), (0, (0, 1))]
mask = np.array(data, dtype=mdtype)
test = flatten_mask(mask)
control = np.array([ 0, 0, 0, 0, 0, 1], dtype=bool)
assert_equal(test, control)
def test_on_ndarray(self):
"Test functions on ndarrays"
a = np.array([1, 2, 3, 4])
m = array(a, mask=False)
test = anom(a)
assert_equal(test, m.anom())
test = reshape(a, (2, 2))
assert_equal(test, m.reshape(2, 2))
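# Hedged standalone sketch, not an original test (assumes the imports above):
# for a structured dtype the mask is itself structured, one boolean per
# field, so individual fields of a record can be masked independently -- the
# behavior the next class exercises.
def _demo_record_mask():
    a = array([(1, 1.0), (2, 2.0)], dtype=[('a', int), ('b', float)])
    a.mask[0] = (0, 1)  # mask only field 'b' of the first record
    return a['b'].mask  # -> [True, False]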
#------------------------------------------------------------------------------
class TestMaskedFields(TestCase):
#
def setUp(self):
ilist = [1, 2, 3, 4, 5]
flist = [1.1, 2.2, 3.3, 4.4, 5.5]
slist = ['one', 'two', 'three', 'four', 'five']
ddtype = [('a', int), ('b', float), ('c', '|S8')]
mdtype = [('a', bool), ('b', bool), ('c', bool)]
mask = [0, 1, 0, 0, 1]
base = array(zip(ilist, flist, slist), mask=mask, dtype=ddtype)
self.data = dict(base=base, mask=mask, ddtype=ddtype, mdtype=mdtype)
def test_set_records_masks(self):
base = self.data['base']
mdtype = self.data['mdtype']
# Set w/ nomask or masked
base.mask = nomask
assert_equal_records(base._mask, np.zeros(base.shape, dtype=mdtype))
base.mask = masked
assert_equal_records(base._mask, np.ones(base.shape, dtype=mdtype))
# Set w/ simple boolean
base.mask = False
assert_equal_records(base._mask, np.zeros(base.shape, dtype=mdtype))
base.mask = True
assert_equal_records(base._mask, np.ones(base.shape, dtype=mdtype))
# Set w/ list
base.mask = [0, 0, 0, 1, 1]
assert_equal_records(base._mask,
np.array([(x, x, x) for x in [0, 0, 0, 1, 1]],
dtype=mdtype))
def test_set_record_element(self):
"Check setting an element of a record)"
base = self.data['base']
(base_a, base_b, base_c) = (base['a'], base['b'], base['c'])
base[0] = (pi, pi, 'pi')
assert_equal(base_a.dtype, int)
assert_equal(base_a._data, [3, 2, 3, 4, 5])
assert_equal(base_b.dtype, float)
assert_equal(base_b._data, [pi, 2.2, 3.3, 4.4, 5.5])
assert_equal(base_c.dtype, '|S8')
assert_equal(base_c._data,
asbytes_nested(['pi', 'two', 'three', 'four', 'five']))
def test_set_record_slice(self):
base = self.data['base']
(base_a, base_b, base_c) = (base['a'], base['b'], base['c'])
base[:3] = (pi, pi, 'pi')
assert_equal(base_a.dtype, int)
assert_equal(base_a._data, [3, 3, 3, 4, 5])
assert_equal(base_b.dtype, float)
assert_equal(base_b._data, [pi, pi, pi, 4.4, 5.5])
assert_equal(base_c.dtype, '|S8')
assert_equal(base_c._data,
asbytes_nested(['pi', 'pi', 'pi', 'four', 'five']))
def test_mask_element(self):
"Check record access"
base = self.data['base']
(base_a, base_b, base_c) = (base['a'], base['b'], base['c'])
base[0] = masked
#
for n in ('a', 'b', 'c'):
assert_equal(base[n].mask, [1, 1, 0, 0, 1])
assert_equal(base[n]._data, base._data[n])
#
def test_getmaskarray(self):
"Test getmaskarray on flexible dtype"
ndtype = [('a', int), ('b', float)]
test = empty(3, dtype=ndtype)
assert_equal(getmaskarray(test),
np.array([(0, 0) , (0, 0), (0, 0)],
dtype=[('a', '|b1'), ('b', '|b1')]))
test[:] = masked
assert_equal(getmaskarray(test),
np.array([(1, 1) , (1, 1), (1, 1)],
dtype=[('a', '|b1'), ('b', '|b1')]))
#
def test_view(self):
"Test view w/ flexible dtype"
iterator = zip(np.arange(10), arand(10))
data = np.array(iterator)
a = array(iterator, dtype=[('a', float), ('b', float)])
a.mask[0] = (1, 0)
controlmask = np.array([1] + 19 * [0], dtype=bool)
# Transform globally to simple dtype
test = a.view(float)
assert_equal(test, data.ravel())
assert_equal(test.mask, controlmask)
        # Transform globally to a subdtype
test = a.view((float, 2))
assert_equal(test, data)
assert_equal(test.mask, controlmask.reshape(-1, 2))
#
test = a.view((float, 2), np.matrix)
assert_equal(test, data)
self.assertTrue(isinstance(test, np.matrix))
#
def test_getitem(self):
ndtype = [('a', float), ('b', float)]
a = array(zip(arand(10), np.arange(10)), dtype=ndtype)
a.mask = np.array(zip([0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 1, 0]),
dtype=[('a', bool), ('b', bool)])
# No mask
self.assertTrue(isinstance(a[1], np.void))
# One element masked
self.assertTrue(isinstance(a[0], MaskedArray))
assert_equal_records(a[0]._data, a._data[0])
assert_equal_records(a[0]._mask, a._mask[0])
# All element masked
self.assertTrue(isinstance(a[-2], MaskedArray))
assert_equal_records(a[-2]._data, a._data[-2])
assert_equal_records(a[-2]._mask, a._mask[-2])
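# Hedged standalone sketch, not an original test (assumes the imports above):
# viewing a structured masked array as a plain float dtype flattens both the
# data and the per-field mask, which is the behavior the view tests below
# depend on.
def _demo_masked_view():
    a = array([(1.0, 2.0)], dtype=[('a', float), ('b', float)])
    a.mask[0] = (1, 0)
    return a.view(float).mask  # -> [True, False]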
#------------------------------------------------------------------------------
class TestMaskedView(TestCase):
#
def setUp(self):
iterator = zip(np.arange(10), arand(10))
data = np.array(iterator)
a = array(iterator, dtype=[('a', float), ('b', float)])
a.mask[0] = (1, 0)
controlmask = np.array([1] + 19 * [0], dtype=bool)
self.data = (data, a, controlmask)
#
def test_view_to_nothing(self):
(data, a, controlmask) = self.data
test = a.view()
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test._data, a._data)
assert_equal(test._mask, a._mask)
#
def test_view_to_type(self):
(data, a, controlmask) = self.data
test = a.view(np.ndarray)
self.assertTrue(not isinstance(test, MaskedArray))
assert_equal(test, a._data)
assert_equal_records(test, data.view(a.dtype).squeeze())
#
def test_view_to_simple_dtype(self):
(data, a, controlmask) = self.data
# View globally
test = a.view(float)
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test, data.ravel())
assert_equal(test.mask, controlmask)
#
def test_view_to_flexible_dtype(self):
(data, a, controlmask) = self.data
#
test = a.view([('A', float), ('B', float)])
assert_equal(test.mask.dtype.names, ('A', 'B'))
assert_equal(test['A'], a['a'])
assert_equal(test['B'], a['b'])
#
test = a[0].view([('A', float), ('B', float)])
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test.mask.dtype.names, ('A', 'B'))
assert_equal(test['A'], a['a'][0])
assert_equal(test['B'], a['b'][0])
#
test = a[-1].view([('A', float), ('B', float)])
self.assertTrue(not isinstance(test, MaskedArray))
assert_equal(test.dtype.names, ('A', 'B'))
assert_equal(test['A'], a['a'][-1])
assert_equal(test['B'], a['b'][-1])
#
def test_view_to_subdtype(self):
(data, a, controlmask) = self.data
# View globally
test = a.view((float, 2))
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test, data)
assert_equal(test.mask, controlmask.reshape(-1, 2))
# View on 1 masked element
test = a[0].view((float, 2))
self.assertTrue(isinstance(test, MaskedArray))
assert_equal(test, data[0])
assert_equal(test.mask, (1, 0))
# View on 1 unmasked element
test = a[-1].view((float, 2))
self.assertTrue(not isinstance(test, MaskedArray))
assert_equal(test, data[-1])
#
def test_view_to_dtype_and_type(self):
(data, a, controlmask) = self.data
#
test = a.view((float, 2), np.matrix)
assert_equal(test, data)
self.assertTrue(isinstance(test, np.matrix))
self.assertTrue(not isinstance(test, MaskedArray))
def test_masked_array():
a = np.ma.array([0, 1, 2, 3], mask=[0, 0, 1, 0])
assert_equal(np.argwhere(a), [[1], [3]])
###############################################################################
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Twisted-15.2.1/twisted/conch/test/test_knownhosts.py | 8 | 45570 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.client.knownhosts}.
"""
import os
from binascii import Error as BinasciiError, b2a_base64, a2b_base64
from twisted.python.reflect import requireModule
if requireModule('Crypto') and requireModule('pyasn1'):
from twisted.conch.ssh.keys import Key, BadKeyError
from twisted.conch.client.knownhosts import \
PlainEntry, HashedEntry, KnownHostsFile, UnparsedEntry, ConsoleUI
from twisted.conch.client import default
else:
skip = "PyCrypto and PyASN1 required for twisted.conch.knownhosts."
from zope.interface.verify import verifyObject
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.conch.interfaces import IKnownHostEntry
from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
from twisted.test.testutils import ComparisonTestsMixin
sampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAsV0VMRbGmzhqxxayLRHmvnFvtyNqgbNKV46dU1bVFB+3y'
'tNvue4Riqv/SVkPRNwMb7eWH29SviXaBxUhYyzKkDoNUq3rTNnH1Vnif6d6X4JCrUb5d3W+Dm'
'YClyJrZ5HgD/hUpdSkTRqdbQ2TrvSAxRacj+vHHT4F4dm1bJSewm3B2D8HVOoi/CbVh3dsIiC'
'dp8VltdZx4qYVfYe2LwVINCbAa3d3tj9ma7RVfw3OH2Mfb+toLd1N5tBQFb7oqTt2nC6I/6Bd'
'4JwPUld+IEitw/suElq/AIJVQXXujeyiZlea90HE65U2mF1ytr17HTAIT2ySokJWyuBANGACk'
'6iIaw==')
otherSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAIEAwaeCZd3UCuPXhX39+/p9qO028jTF76DMVd9mPvYVDVXuf'
'WckKZauF7+0b7qm+ChT7kan6BzRVo4++gCVNfAlMzLysSt3ylmOR48tFpAfygg9UCX3DjHz0E'
'lOOUKh3iifc9aUShD0OPaK3pR5JJ8jfiBfzSYWt/hDi/iZ4igsSs8=')
thirdSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAl/TQakPkePlnwCBRPitIVUTg6Z8VzN1en+DGkyo/evkmLw'
'7o4NWR5qbysk9A9jXW332nxnEuAnbcCam9SHe1su1liVfyIK0+3bdn0YRB0sXIbNEtMs2LtCho'
'/aV3cXPS+Cf1yut3wvIpaRnAzXxuKPCTXQ7/y0IXa8TwkRBH58OJa3RqfQ/NsSp5SAfdsrHyH2'
'aitiVKm2jfbTKzSEqOQG/zq4J9GXTkq61gZugory/Tvl5/yPgSnOR6C9jVOMHf27ZPoRtyj9SY'
'343Hd2QHiIE0KPZJEgCynKeWoKz8v6eTSK8n4rBnaqWdp8MnGZK1WGy05MguXbyCDuTC8AmJXQ'
'==')
sampleKey = a2b_base64(sampleEncodedKey)
otherSampleKey = a2b_base64(otherSampleEncodedKey)
thirdSampleKey = a2b_base64(thirdSampleEncodedKey)
samplePlaintextLine = (
"www.twistedmatrix.com ssh-rsa " + sampleEncodedKey + "\n")
otherSamplePlaintextLine = (
"divmod.com ssh-rsa " + otherSampleEncodedKey + "\n")
sampleHostIPLine = (
"www.twistedmatrix.com,198.49.126.131 ssh-rsa " + sampleEncodedKey + "\n")
sampleHashedLine = (
"|1|gJbSEPBG9ZSBoZpHNtZBD1bHKBA=|bQv+0Xa0dByrwkA1EB0E7Xop/Fo= ssh-rsa " +
sampleEncodedKey + "\n")
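# Hedged sketch, not part of the original fixtures: an OpenSSH hashed
# known_hosts line stores base64(salt) and base64(HMAC-SHA1(salt, hostname))
# so the hostname cannot be read back; this is the format HashedEntry parses
# below. The helper name is illustrative only.
def _demoHashedHostToken(hostname, salt):
    import hmac
    from hashlib import sha1
    return b2a_base64(hmac.new(salt, hostname, sha1).digest()).strip()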
class EntryTestsMixin:
"""
Tests for implementations of L{IKnownHostEntry}. Subclasses must set the
'entry' attribute to a provider of that interface, the implementation of
that interface under test.
@ivar entry: a provider of L{IKnownHostEntry} with a hostname of
www.twistedmatrix.com and an RSA key of sampleKey.
"""
def test_providesInterface(self):
"""
The given entry should provide IKnownHostEntry.
"""
verifyObject(IKnownHostEntry, self.entry)
def test_fromString(self):
"""
Constructing a plain text entry from an unhashed known_hosts entry will
result in an L{IKnownHostEntry} provider with 'keyString', 'hostname',
and 'keyType' attributes. While outside the interface in question,
these attributes are held in common by L{PlainEntry} and L{HashedEntry}
implementations; other implementations should override this method in
subclasses.
"""
entry = self.entry
self.assertEqual(entry.publicKey, Key.fromString(sampleKey))
self.assertEqual(entry.keyType, "ssh-rsa")
def test_matchesKey(self):
"""
L{IKnownHostEntry.matchesKey} checks to see if an entry matches a given
SSH key.
"""
twistedmatrixDotCom = Key.fromString(sampleKey)
divmodDotCom = Key.fromString(otherSampleKey)
self.assertEqual(
True,
self.entry.matchesKey(twistedmatrixDotCom))
self.assertEqual(
False,
self.entry.matchesKey(divmodDotCom))
def test_matchesHost(self):
"""
L{IKnownHostEntry.matchesHost} checks to see if an entry matches a
given hostname.
"""
self.assertEqual(True, self.entry.matchesHost(
"www.twistedmatrix.com"))
self.assertEqual(False, self.entry.matchesHost(
"www.divmod.com"))
class PlainEntryTests(EntryTestsMixin, TestCase):
"""
Test cases for L{PlainEntry}.
"""
plaintextLine = samplePlaintextLine
hostIPLine = sampleHostIPLine
def setUp(self):
"""
Set 'entry' to a sample plain-text entry with sampleKey as its key.
"""
self.entry = PlainEntry.fromString(self.plaintextLine)
def test_matchesHostIP(self):
"""
A "hostname,ip" formatted line will match both the host and the IP.
"""
self.entry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(True, self.entry.matchesHost("198.49.126.131"))
self.test_matchesHost()
def test_toString(self):
"""
L{PlainEntry.toString} generates the serialized OpenSSL format string
for the entry, sans newline.
"""
self.assertEqual(self.entry.toString(), self.plaintextLine.rstrip("\n"))
multiHostEntry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(multiHostEntry.toString(),
self.hostIPLine.rstrip("\n"))
class PlainTextWithCommentTests(PlainEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
plaintextLine = samplePlaintextLine[:-1] + " plain text comment.\n"
hostIPLine = sampleHostIPLine[:-1] + " text following host/IP line\n"
class HashedEntryTests(EntryTestsMixin, ComparisonTestsMixin, TestCase):
"""
Tests for L{HashedEntry}.
This suite doesn't include any tests for host/IP pairs because hashed
    entries store IP addresses the same way as hostnames and do not support
comma-separated lists. (If you hash the IP and host together you can't
tell if you've got the key already for one or the other.)
"""
hashedLine = sampleHashedLine
def setUp(self):
"""
Set 'entry' to a sample hashed entry for twistedmatrix.com with
sampleKey as its key.
"""
self.entry = HashedEntry.fromString(self.hashedLine)
def test_toString(self):
"""
L{HashedEntry.toString} generates the serialized OpenSSL format string
for the entry, sans the newline.
"""
self.assertEqual(self.entry.toString(), self.hashedLine.rstrip("\n"))
def test_equality(self):
"""
Two L{HashedEntry} instances compare equal if and only if they represent
the same host and key in exactly the same way: the host salt, host hash,
public key type, public key, and comment fields must all be equal.
"""
hostSalt = "gJbSEPBG9ZSBoZpHNtZBD1bHKBA"
hostHash = "bQv+0Xa0dByrwkA1EB0E7Xop/Fo"
publicKey = Key.fromString(sampleKey)
comment = "hello, world"
entry = HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey, comment)
duplicate = HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey, comment)
# Vary the host salt
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt[::-1], hostHash, publicKey.type(), publicKey,
comment))
# Vary the host hash
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash[::-1], publicKey.type(), publicKey,
comment))
# Vary the key type
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type()[::-1], publicKey,
comment))
# Vary the key
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type(),
Key.fromString(otherSampleKey), comment))
# Vary the comment
self.assertNormalEqualityImplementation(
entry, duplicate,
HashedEntry(
hostSalt, hostHash, publicKey.type(), publicKey,
comment[::-1]))
class HashedEntryWithCommentTests(HashedEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
hashedLine = sampleHashedLine[:-1] + " plain text comment.\n"
class UnparsedEntryTests(TestCase, EntryTestsMixin):
"""
Tests for L{UnparsedEntry}
"""
def setUp(self):
"""
Set up the 'entry' to be an unparsed entry for some random text.
"""
self.entry = UnparsedEntry(" This is a bogus entry. \n")
def test_fromString(self):
"""
Creating an L{UnparsedEntry} should simply record the string it was
passed.
"""
self.assertEqual(" This is a bogus entry. \n",
self.entry._string)
def test_matchesHost(self):
"""
An unparsed entry can't match any hosts.
"""
self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com"))
def test_matchesKey(self):
"""
An unparsed entry can't match any keys.
"""
self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey)))
def test_toString(self):
"""
L{UnparsedEntry.toString} returns its input string, sans trailing
newline.
"""
self.assertEqual(" This is a bogus entry. ", self.entry.toString())
class ParseErrorTests(TestCase):
"""
L{HashedEntry.fromString} and L{PlainEntry.fromString} can raise a variety
of errors depending on misformattings of certain strings. These tests make
sure those errors are caught. Since many of the ways that this can go
wrong are in the lower-level APIs being invoked by the parsing logic,
several of these are integration tests with the C{base64} and
L{twisted.conch.ssh.keys} modules.
"""
def invalidEntryTest(self, cls):
"""
If there are fewer than three elements, C{fromString} should raise
L{InvalidEntry}.
"""
self.assertRaises(InvalidEntry, cls.fromString, "invalid")
def notBase64Test(self, cls):
"""
If the key is not base64, C{fromString} should raise L{BinasciiError}.
"""
self.assertRaises(BinasciiError, cls.fromString, "x x x")
def badKeyTest(self, cls, prefix):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{fromString} should raise L{BadKeyError}.
"""
self.assertRaises(BadKeyError, cls.fromString, ' '.join(
[prefix, "ssh-rsa", b2a_base64(
"Hey, this isn't an SSH key!").strip()]))
def test_invalidPlainEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, L{PlainEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(PlainEntry)
def test_invalidHashedEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, or the hostname salt/hash portion has more than two elements,
L{HashedEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(HashedEntry)
a, b, c = sampleHashedLine.split()
self.assertRaises(InvalidEntry, HashedEntry.fromString, ' '.join(
[a + "||", b, c]))
def test_plainNotBase64(self):
"""
If the key portion of a plain entry is not decodable as base64,
C{fromString} should raise L{BinasciiError}.
"""
self.notBase64Test(PlainEntry)
def test_hashedNotBase64(self):
"""
        If the key, host salt, or host hash portion of a hashed entry is not
        valid base64, C{fromString} will raise L{BinasciiError}.
"""
self.notBase64Test(HashedEntry)
a, b, c = sampleHashedLine.split()
# Salt not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|" + b2a_base64("stuff").strip(), b, c]))
# Host hash not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join([HashedEntry.MAGIC + b2a_base64("stuff").strip() + "|x",
b, c]))
# Neither salt nor hash valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|x", b, c]))
def test_hashedBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{HashedEntry.fromString} should raise L{BadKeyError}.
"""
a, b, c = sampleHashedLine.split()
self.badKeyTest(HashedEntry, a)
def test_plainBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{PlainEntry.fromString} should raise L{BadKeyError}.
"""
self.badKeyTest(PlainEntry, "hostname")
class KnownHostsDatabaseTests(TestCase):
"""
Tests for L{KnownHostsFile}.
"""
def pathWithContent(self, content):
"""
Return a FilePath with the given initial content.
"""
fp = FilePath(self.mktemp())
fp.setContent(content)
return fp
def loadSampleHostsFile(self, content=(
sampleHashedLine + otherSamplePlaintextLine +
"\n# That was a blank line.\n"
"This is just unparseable.\n"
"|1|This also unparseable.\n")):
"""
Return a sample hosts file, with keys for www.twistedmatrix.com and
divmod.com present.
"""
return KnownHostsFile.fromPath(self.pathWithContent(content))
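    def _saveRoundTripSketch(self):
        # Illustrative helper, not an original test: the typical round trip
        # exercised below is load -> addHostKey -> save, after which the new
        # entry is persisted at savePath alongside the existing ones.
        hostsFile = self.loadSampleHostsFile()
        hostsFile.addHostKey("example.com", Key.fromString(thirdSampleKey))
        hostsFile.save()
        return hostsFile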
def test_readOnlySavePath(self):
"""
L{KnownHostsFile.savePath} is read-only; if an assignment is made to
it, L{AttributeError} is raised and the value is unchanged.
"""
path = FilePath(self.mktemp())
new = FilePath(self.mktemp())
hostsFile = KnownHostsFile(path)
self.assertRaises(AttributeError, setattr, hostsFile, "savePath", new)
self.assertEqual(path, hostsFile.savePath)
def test_defaultInitializerIgnoresExisting(self):
"""
The default initializer for L{KnownHostsFile} disregards any existing
contents in the save path.
"""
hostsFile = KnownHostsFile(self.pathWithContent(sampleHashedLine))
self.assertEqual([], list(hostsFile.iterentries()))
def test_defaultInitializerClobbersExisting(self):
"""
After using the default initializer for L{KnownHostsFile}, the first use
of L{KnownHostsFile.save} overwrites any existing contents in the save
path.
"""
path = self.pathWithContent(sampleHashedLine)
hostsFile = KnownHostsFile(path)
entry = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
hostsFile.save()
# Check KnownHostsFile to see what it thinks the state is
self.assertEqual([entry], list(hostsFile.iterentries()))
# And also directly check the underlying file itself
self.assertEqual(entry.toString() + "\n", path.getContent())
def test_saveResetsClobberState(self):
"""
After L{KnownHostsFile.save} is used once with an instance initialized
by the default initializer, contents of the save path are respected and
preserved.
"""
hostsFile = KnownHostsFile(self.pathWithContent(sampleHashedLine))
preSave = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
hostsFile.save()
postSave = hostsFile.addHostKey(
"another.example.com", Key.fromString(thirdSampleKey))
hostsFile.save()
self.assertEqual([preSave, postSave], list(hostsFile.iterentries()))
def test_loadFromPath(self):
"""
Loading a L{KnownHostsFile} from a path with six entries in it will
result in a L{KnownHostsFile} object with six L{IKnownHostEntry}
providers in it.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(6, len(list(hostsFile.iterentries())))
def test_iterentriesUnsaved(self):
"""
If the save path for a L{KnownHostsFile} does not exist,
L{KnownHostsFile.iterentries} still returns added but unsaved entries.
"""
hostsFile = KnownHostsFile(FilePath(self.mktemp()))
hostsFile.addHostKey("www.example.com", Key.fromString(sampleKey))
self.assertEqual(1, len(list(hostsFile.iterentries())))
def test_verifyHashedEntry(self):
"""
Loading a L{KnownHostsFile} from a path containing a single valid
L{HashedEntry} entry will result in a L{KnownHostsFile} object
with one L{IKnownHostEntry} provider.
"""
        hostsFile = self.loadSampleHostsFile(sampleHashedLine)
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], HashedEntry)
self.assertEqual(True, entries[0].matchesHost("www.twistedmatrix.com"))
self.assertEqual(1, len(entries))
def test_verifyPlainEntry(self):
"""
Loading a L{KnownHostsFile} from a path containing a single valid
L{PlainEntry} entry will result in a L{KnownHostsFile} object
with one L{IKnownHostEntry} provider.
"""
        hostsFile = self.loadSampleHostsFile(otherSamplePlaintextLine)
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], PlainEntry)
self.assertEqual(True, entries[0].matchesHost("divmod.com"))
self.assertEqual(1, len(entries))
def test_verifyUnparsedEntry(self):
"""
Loading a L{KnownHostsFile} from a path that only contains '\n' will
result in a L{KnownHostsFile} object containing a L{UnparsedEntry}
object.
"""
hostsFile = self.loadSampleHostsFile(("\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "")
self.assertEqual(1, len(entries))
def test_verifyUnparsedComment(self):
"""
Loading a L{KnownHostsFile} from a path that contains a comment will
result in a L{KnownHostsFile} object containing a L{UnparsedEntry}
object.
"""
hostsFile = self.loadSampleHostsFile(("# That was a blank line.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "# That was a blank line.")
def test_verifyUnparsableLine(self):
"""
Loading a L{KnownHostsFile} from a path that contains an unparseable
line will be represented as an L{UnparsedEntry} instance.
"""
hostsFile = self.loadSampleHostsFile(("This is just unparseable.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "This is just unparseable.")
self.assertEqual(1, len(entries))
def test_verifyUnparsableEncryptionMarker(self):
"""
Loading a L{KnownHostsFile} from a path containing an unparseable line
that starts with an encryption marker will be represented as an
L{UnparsedEntry} instance.
"""
hostsFile = self.loadSampleHostsFile(("|1|This is unparseable.\n"))
entries = list(hostsFile.iterentries())
self.assertIsInstance(entries[0], UnparsedEntry)
self.assertEqual(entries[0].toString(), "|1|This is unparseable.")
self.assertEqual(1, len(entries))
def test_loadNonExistent(self):
"""
Loading a L{KnownHostsFile} from a path that does not exist should
result in an empty L{KnownHostsFile} that will save back to that path.
"""
pn = self.mktemp()
knownHostsFile = KnownHostsFile.fromPath(FilePath(pn))
entries = list(knownHostsFile.iterentries())
self.assertEqual([], entries)
self.assertEqual(False, FilePath(pn).exists())
knownHostsFile.save()
self.assertEqual(True, FilePath(pn).exists())
def test_loadNonExistentParent(self):
"""
Loading a L{KnownHostsFile} from a path whose parent directory does not
exist should result in an empty L{KnownHostsFile} that will save back
to that path, creating its parent directory(ies) in the process.
"""
thePath = FilePath(self.mktemp())
knownHostsPath = thePath.child("foo").child("known_hosts")
knownHostsFile = KnownHostsFile.fromPath(knownHostsPath)
knownHostsFile.save()
knownHostsPath.restat(False)
self.assertEqual(True, knownHostsPath.exists())
def test_savingAddsEntry(self):
"""
L{KnownHostsFile.save} will write out a new file with any entries
that have been added.
"""
path = self.pathWithContent(sampleHashedLine +
otherSamplePlaintextLine)
knownHostsFile = KnownHostsFile.fromPath(path)
newEntry = knownHostsFile.addHostKey("some.example.com",
Key.fromString(thirdSampleKey))
expectedContent = (
sampleHashedLine +
otherSamplePlaintextLine + HashedEntry.MAGIC +
b2a_base64(newEntry._hostSalt).strip() + "|" +
b2a_base64(newEntry._hostHash).strip() + " ssh-rsa " +
thirdSampleEncodedKey + "\n")
# Sanity check, let's make sure the base64 API being used for the test
# isn't inserting spurious newlines.
self.assertEqual(3, expectedContent.count("\n"))
knownHostsFile.save()
self.assertEqual(expectedContent, path.getContent())
def test_savingAvoidsDuplication(self):
"""
L{KnownHostsFile.save} only writes new entries to the save path, not
entries which were added and already written by a previous call to
C{save}.
"""
path = FilePath(self.mktemp())
knownHosts = KnownHostsFile(path)
entry = knownHosts.addHostKey(
"some.example.com", Key.fromString(sampleKey))
knownHosts.save()
knownHosts.save()
knownHosts = KnownHostsFile.fromPath(path)
self.assertEqual([entry], list(knownHosts.iterentries()))
def test_savingsPreservesExisting(self):
"""
L{KnownHostsFile.save} will not overwrite existing entries in its save
path, even if they were only added after the L{KnownHostsFile} instance
was initialized.
"""
# Start off with one host/key pair in the file
path = self.pathWithContent(sampleHashedLine)
knownHosts = KnownHostsFile.fromPath(path)
# After initializing the KnownHostsFile instance, add a second host/key
# pair to the file directly - without the instance's help or knowledge.
with path.open("a") as hostsFileObj:
hostsFileObj.write(otherSamplePlaintextLine)
# Add a third host/key pair using the KnownHostsFile instance
key = Key.fromString(thirdSampleKey)
knownHosts.addHostKey("brandnew.example.com", key)
knownHosts.save()
# Check that all three host/key pairs are present.
knownHosts = KnownHostsFile.fromPath(path)
self.assertEqual([True, True, True], [
knownHosts.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)),
knownHosts.hasHostKey(
"divmod.com", Key.fromString(otherSampleKey)),
knownHosts.hasHostKey("brandnew.example.com", key)])
def test_hasPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present and matches the expected key.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(True, hostsFile.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)))
def test_hasNonPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{False} when a key for the given
hostname is not present.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(False, hostsFile.hasHostKey(
"non-existent.example.com", Key.fromString(sampleKey)))
def test_hasLaterAddedKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present in the file, even if it is only added to the file
after the L{KnownHostsFile} instance is initialized.
"""
key = Key.fromString(sampleKey)
entry = PlainEntry(["brandnew.example.com"], key.sshType(), key, "")
hostsFile = self.loadSampleHostsFile()
with hostsFile.savePath.open("a") as hostsFileObj:
hostsFileObj.write(entry.toString() + "\n")
self.assertEqual(
True, hostsFile.hasHostKey("brandnew.example.com", key))
def test_savedEntryHasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key is
present in the underlying file, but different from the expected one.
The resulting exception should have an C{offendingEntry} indicating the
given entry.
"""
hostsFile = self.loadSampleHostsFile()
entries = list(hostsFile.iterentries())
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.offendingEntry, entries[0])
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile.savePath)
def test_savedEntryAfterAddHasKeyMismatch(self):
"""
Even after a new entry has been added in memory but not yet saved, the
L{HostKeyChanged} exception raised by L{KnownHostsFile.hasHostKey} has a
C{lineno} attribute which indicates the 1-based line number of the
offending entry in the underlying file when the given host key does not
match the expected host key.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile.savePath)
def test_unsavedEntryHasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key is
present in memory (but not yet saved), but different from the expected
        one. The resulting exception has an C{offendingEntry} indicating the
given entry, but no filename or line number information (reflecting the
fact that the entry exists only in memory).
"""
hostsFile = KnownHostsFile(FilePath(self.mktemp()))
entry = hostsFile.addHostKey(
"www.example.com", Key.fromString(otherSampleKey))
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.example.com", Key.fromString(thirdSampleKey))
self.assertEqual(exception.offendingEntry, entry)
self.assertEqual(exception.lineno, None)
self.assertEqual(exception.path, None)
def test_addHostKey(self):
"""
L{KnownHostsFile.addHostKey} adds a new L{HashedEntry} to the host
file, and returns it.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertEqual(False,
hostsFile.hasHostKey("somewhere.example.com", aKey))
newEntry = hostsFile.addHostKey("somewhere.example.com", aKey)
# The code in OpenSSH requires host salts to be 20 characters long.
# This is the required length of a SHA-1 HMAC hash, so it's just a
# sanity check.
self.assertEqual(20, len(newEntry._hostSalt))
self.assertEqual(True,
newEntry.matchesHost("somewhere.example.com"))
self.assertEqual(newEntry.keyType, "ssh-rsa")
self.assertEqual(aKey, newEntry.publicKey)
self.assertEqual(True,
hostsFile.hasHostKey("somewhere.example.com", aKey))
def test_randomSalts(self):
"""
L{KnownHostsFile.addHostKey} generates a random salt for each new key,
so subsequent salts will be different.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertNotEqual(
hostsFile.addHostKey("somewhere.example.com", aKey)._hostSalt,
hostsFile.addHostKey("somewhere-else.example.com", aKey)._hostSalt)
def test_verifyValidKey(self):
"""
Verifying a valid key should return a L{Deferred} which fires with
True.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
ui = FakeUI()
d = hostsFile.verifyHostKey(ui, "www.twistedmatrix.com", "1.2.3.4",
Key.fromString(sampleKey))
l = []
d.addCallback(l.append)
self.assertEqual(l, [True])
def test_verifyInvalidKey(self):
"""
Verifying an invalid key should return a L{Deferred} which fires with a
L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "1.2.3.4", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def verifyNonPresentKey(self):
"""
Set up a test to verify a key that isn't present. Return a 3-tuple of
the UI, a list set up to collect the result of the verifyHostKey call,
and the sample L{KnownHostsFile} being used.
This utility method avoids returning a L{Deferred}, and records results
in the returned list instead, because the events which get generated
here are pre-recorded in the 'ui' object. If the L{Deferred} in
        question does not fire, the test will fail quickly with an empty list.
"""
hostsFile = self.loadSampleHostsFile()
absentKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
l = []
d = hostsFile.verifyHostKey(
ui, "sample-host.example.com", "4.3.2.1", absentKey)
d.addBoth(l.append)
self.assertEqual([], l)
self.assertEqual(
ui.promptText,
"The authenticity of host 'sample-host.example.com (4.3.2.1)' "
"can't be established.\n"
"RSA key fingerprint is "
"89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f.\n"
"Are you sure you want to continue connecting (yes/no)? ")
return ui, l, hostsFile
def test_verifyNonPresentKey_Yes(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says yes, the Deferred should fire with True.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(True)
self.assertEqual([True], l)
reloaded = KnownHostsFile.fromPath(knownHostsFile.savePath)
self.assertEqual(
True,
reloaded.hasHostKey("4.3.2.1", Key.fromString(thirdSampleKey)))
self.assertEqual(
True,
reloaded.hasHostKey("sample-host.example.com",
Key.fromString(thirdSampleKey)))
def test_verifyNonPresentKey_No(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says no, the Deferred should fail with
UserRejectedKey.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(False)
l[0].trap(UserRejectedKey)
def test_verifyHostIPMismatch(self):
"""
Verifying a key where the host is present (and correct), but the IP is
        present and different, should result in the deferred firing with a
        L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "4.3.2.1", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def test_verifyKeyForHostAndIP(self):
"""
Verifying a key where the hostname is present but the IP is not should
result in the key being added for the IP and the user being warned
about the change.
"""
ui = FakeUI()
hostsFile = self.loadSampleHostsFile()
expectedKey = Key.fromString(sampleKey)
hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "5.4.3.2", expectedKey)
self.assertEqual(
True, KnownHostsFile.fromPath(hostsFile.savePath).hasHostKey(
"5.4.3.2", expectedKey))
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'5.4.3.2' to the list of known hosts."],
ui.userWarnings)
class FakeFile(object):
"""
A fake file-like object that acts enough like a file for
L{ConsoleUI.prompt}.
"""
def __init__(self):
self.inlines = []
self.outchunks = []
self.closed = False
def readline(self):
"""
Return a line from the 'inlines' list.
"""
return self.inlines.pop(0)
def write(self, chunk):
"""
Append the given item to the 'outchunks' list.
"""
if self.closed:
raise IOError("the file was closed")
self.outchunks.append(chunk)
def close(self):
"""
Set the 'closed' flag to True, explicitly marking that it has been
closed.
"""
self.closed = True
class ConsoleUITests(TestCase):
"""
Test cases for L{ConsoleUI}.
"""
def setUp(self):
"""
Create a L{ConsoleUI} pointed at a L{FakeFile}.
"""
self.fakeFile = FakeFile()
self.ui = ConsoleUI(self.openFile)
def openFile(self):
"""
Return the current fake file.
"""
return self.fakeFile
def newFile(self, lines):
"""
Create a new fake file (the next file that self.ui will open) with the
given list of lines to be returned from readline().
"""
self.fakeFile = FakeFile()
self.fakeFile.inlines = lines
def test_promptYes(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'yes', then it returns a L{Deferred} that fires with
True.
"""
for okYes in ['yes', 'Yes', 'yes\n']:
self.newFile([okYes])
l = []
self.ui.prompt("Hello, world!").addCallback(l.append)
self.assertEqual(["Hello, world!"], self.fakeFile.outchunks)
self.assertEqual([True], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptNo(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'no', then it returns a L{Deferred} that fires with
False.
"""
for okNo in ['no', 'No', 'no\n']:
self.newFile([okNo])
l = []
self.ui.prompt("Goodbye, world!").addCallback(l.append)
self.assertEqual(["Goodbye, world!"], self.fakeFile.outchunks)
self.assertEqual([False], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptRepeatedly(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is neither 'yes' nor 'no', then it says "Please enter
'yes' or 'no'" until it gets a 'yes' or a 'no', at which point it
returns a Deferred that answers either True or False.
"""
self.newFile(['what', 'uh', 'okay', 'yes'])
l = []
self.ui.prompt("Please say something useful.").addCallback(l.append)
self.assertEqual([True], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something useful."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
self.newFile(['blah', 'stuff', 'feh', 'no'])
l = []
self.ui.prompt("Please say something negative.").addCallback(l.append)
self.assertEqual([False], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something negative."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
def test_promptOpenFailed(self):
"""
If the C{opener} passed to L{ConsoleUI} raises an exception, that
exception will fail the L{Deferred} returned from L{ConsoleUI.prompt}.
"""
def raiseIt():
raise IOError()
ui = ConsoleUI(raiseIt)
d = ui.prompt("This is a test.")
return self.assertFailure(d, IOError)
def test_warn(self):
"""
L{ConsoleUI.warn} should output a message to the console object.
"""
self.ui.warn("Test message.")
self.assertEqual(["Test message."], self.fakeFile.outchunks)
self.assertEqual(True, self.fakeFile.closed)
def test_warnOpenFailed(self):
"""
L{ConsoleUI.warn} should log a traceback if the output can't be opened.
"""
def raiseIt():
1 / 0
ui = ConsoleUI(raiseIt)
ui.warn("This message never makes it.")
self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
class FakeUI(object):
"""
A fake UI object, adhering to the interface expected by
L{KnownHostsFile.verifyHostKey}
@ivar userWarnings: inputs provided to 'warn'.
@ivar promptDeferred: last result returned from 'prompt'.
@ivar promptText: the last input provided to 'prompt'.
"""
def __init__(self):
self.userWarnings = []
self.promptDeferred = None
self.promptText = None
def prompt(self, text):
"""
Issue the user an interactive prompt, which they can accept or deny.
"""
self.promptText = text
self.promptDeferred = Deferred()
return self.promptDeferred
def warn(self, text):
"""
Issue a non-interactive warning to the user.
"""
self.userWarnings.append(text)
class FakeObject(object):
"""
A fake object that can have some attributes. Used to fake
L{SSHClientTransport} and L{SSHClientFactory}.
"""
class DefaultAPITests(TestCase):
"""
The API in L{twisted.conch.client.default.verifyHostKey} is the integration
point between the code in the rest of conch and L{KnownHostsFile}.
"""
def patchedOpen(self, fname, mode):
"""
The patched version of 'open'; this returns a L{FakeFile} that the
instantiated L{ConsoleUI} can use.
"""
self.assertEqual(fname, "/dev/tty")
self.assertEqual(mode, "r+b")
return self.fakeFile
def setUp(self):
"""
Patch 'open' in verifyHostKey.
"""
self.fakeFile = FakeFile()
self.patch(default, "_open", self.patchedOpen)
self.hostsOption = self.mktemp()
knownHostsFile = KnownHostsFile(FilePath(self.hostsOption))
knownHostsFile.addHostKey("exists.example.com",
Key.fromString(sampleKey))
knownHostsFile.addHostKey("4.3.2.1", Key.fromString(sampleKey))
knownHostsFile.save()
self.fakeTransport = FakeObject()
self.fakeTransport.factory = FakeObject()
self.options = self.fakeTransport.factory.options = {
'host': "exists.example.com",
'known-hosts': self.hostsOption
}
def test_verifyOKKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
C{1} when passed a host, IP, and key which already match the
known_hosts file it is supposed to check.
"""
l = []
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def replaceHome(self, tempHome):
"""
Replace the HOME environment variable until the end of the current
test, with the given new home-directory, so that L{os.path.expanduser}
will yield controllable, predictable results.
@param tempHome: the pathname to replace the HOME variable with.
@type tempHome: L{str}
"""
oldHome = os.environ.get('HOME')
def cleanupHome():
if oldHome is None:
del os.environ['HOME']
else:
os.environ['HOME'] = oldHome
self.addCleanup(cleanupHome)
os.environ['HOME'] = tempHome
def test_noKnownHostsOption(self):
"""
L{default.verifyHostKey} should find your known_hosts file in
~/.ssh/known_hosts if you don't specify one explicitly on the command
line.
"""
l = []
tmpdir = self.mktemp()
oldHostsOption = self.hostsOption
hostsNonOption = FilePath(tmpdir).child(".ssh").child("known_hosts")
hostsNonOption.parent().makedirs()
FilePath(oldHostsOption).moveTo(hostsNonOption)
self.replaceHome(tmpdir)
self.options['known-hosts'] = None
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def test_verifyHostButNotIP(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
        C{1} when passed a host which matches but whose IP is not present in
        its known_hosts file, and should also warn the user that it has added
        the IP address.
"""
l = []
default.verifyHostKey(self.fakeTransport, "8.7.6.5", sampleKey,
"Fingerprint not required.").addCallback(l.append)
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'8.7.6.5' to the list of known hosts."],
self.fakeFile.outchunks)
self.assertEqual([1], l)
knownHostsFile = KnownHostsFile.fromPath(FilePath(self.hostsOption))
self.assertEqual(True, knownHostsFile.hasHostKey("8.7.6.5",
Key.fromString(sampleKey)))
def test_verifyQuestion(self):
"""
        L{default.verifyHostKey} should return a L{Deferred} which fails with
        L{UserRejectedKey} when passed an unknown host that the user refuses
        to acknowledge.
"""
self.fakeTransport.factory.options['host'] = 'fake.example.com'
self.fakeFile.inlines.append("no")
d = default.verifyHostKey(
self.fakeTransport, "9.8.7.6", otherSampleKey, "No fingerprint!")
self.assertEqual(
["The authenticity of host 'fake.example.com (9.8.7.6)' "
"can't be established.\n"
"RSA key fingerprint is "
"57:a1:c2:a1:07:a0:2b:f4:ce:b5:e5:b7:ae:cc:e1:99.\n"
"Are you sure you want to continue connecting (yes/no)? "],
self.fakeFile.outchunks)
return self.assertFailure(d, UserRejectedKey)
def test_verifyBadKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fails with
L{HostKeyChanged} if the host key is incorrect.
"""
d = default.verifyHostKey(
self.fakeTransport, "4.3.2.1", otherSampleKey,
"Again, not required.")
return self.assertFailure(d, HostKeyChanged)
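# A brief usage sketch (an illustrative addition, not part of the original
# test suite): assuming the module-level imports above (KnownHostsFile,
# ConsoleUI, FilePath, Key) and the sampleKey fixture, this is how the API
# exercised by these tests fits together outside of a test.
def _exampleVerifyHostKey():  # pragma: no cover
    # Prompt the user on the controlling terminal, as conch's default does.
    ui = ConsoleUI(lambda: open("/dev/tty", "r+b"))
    hostsFile = KnownHostsFile.fromPath(
        FilePath(os.path.expanduser("~/.ssh/known_hosts")))
    # Returns a Deferred that fires with True once the key is accepted.
    return hostsFile.verifyHostKey(
        ui, "www.twistedmatrix.com", "1.2.3.4", Key.fromString(sampleKey))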
| mit |
openstack/magnum | magnum/tests/unit/api/controllers/v1/test_federation.py | 2 | 18016 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from magnum.api.controllers.v1 import federation as api_federation
from magnum.conductor import api as rpcapi
import magnum.conf
from magnum import objects
from magnum.tests import base
from magnum.tests.unit.api import base as api_base
from magnum.tests.unit.api import utils as apiutils
from magnum.tests.unit.objects import utils as obj_utils
CONF = magnum.conf.CONF
class TestFederationObject(base.TestCase):
def test_federation_init(self):
fed_dict = apiutils.federation_post_data()
fed_dict['uuid'] = uuidutils.generate_uuid()
federation = api_federation.Federation(**fed_dict)
self.assertEqual(fed_dict['uuid'], federation.uuid)
class TestListFederation(api_base.FunctionalTest):
def setUp(self):
super(TestListFederation, self).setUp()
def test_empty(self):
response = self.get_json('/federations')
self.assertEqual(response['federations'], [])
def test_one(self):
federation = obj_utils.create_test_federation(
self.context, uuid=uuidutils.generate_uuid())
response = self.get_json('/federations')
self.assertEqual(federation.uuid, response['federations'][0]['uuid'])
def test_get_one(self):
federation = obj_utils.create_test_federation(
self.context, uuid=uuidutils.generate_uuid())
response = self.get_json('/federations/%s' % federation['uuid'])
        self.assertEqual(federation.uuid, response['uuid'])
def test_get_one_by_name(self):
federation = obj_utils.create_test_federation(
self.context, uuid=uuidutils.generate_uuid())
response = self.get_json('/federations/%s' % federation['name'])
        self.assertEqual(federation.uuid, response['uuid'])
def test_get_one_by_name_not_found(self):
response = self.get_json('/federations/not_found', expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_get_one_by_uuid(self):
temp_uuid = uuidutils.generate_uuid()
federation = obj_utils.create_test_federation(self.context,
uuid=temp_uuid)
response = self.get_json('/federations/%s' % temp_uuid)
        self.assertEqual(federation.uuid, response['uuid'])
def test_get_one_by_uuid_not_found(self):
temp_uuid = uuidutils.generate_uuid()
response = self.get_json('/federations/%s' % temp_uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_get_one_by_name_multiple_federation(self):
obj_utils.create_test_federation(self.context, name='test_federation',
uuid=uuidutils.generate_uuid())
obj_utils.create_test_federation(self.context, name='test_federation',
uuid=uuidutils.generate_uuid())
response = self.get_json('/federations/test_federation',
expect_errors=True)
self.assertEqual(409, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_get_all_with_pagination_marker(self):
federation_list = []
for id_ in range(4):
federation = obj_utils.create_test_federation(
self.context, id=id_, uuid=uuidutils.generate_uuid())
federation_list.append(federation)
response = self.get_json(
'/federations?limit=3&marker=%s' % federation_list[2].uuid)
self.assertEqual(1, len(response['federations']))
self.assertEqual(federation_list[-1].uuid,
response['federations'][0]['uuid'])
def test_detail(self):
federation = obj_utils.create_test_federation(
self.context, uuid=uuidutils.generate_uuid())
response = self.get_json('/federations/detail')
self.assertEqual(federation.uuid, response['federations'][0]["uuid"])
def test_detail_with_pagination_marker(self):
federation_list = []
for id_ in range(4):
federation = obj_utils.create_test_federation(
self.context, id=id_, uuid=uuidutils.generate_uuid())
federation_list.append(federation)
response = self.get_json(
'/federations/detail?limit=3&marker=%s' % federation_list[2].uuid)
self.assertEqual(1, len(response['federations']))
self.assertEqual(federation_list[-1].uuid,
response['federations'][0]['uuid'])
def test_detail_against_single(self):
federation = obj_utils.create_test_federation(
self.context, uuid=uuidutils.generate_uuid())
response = self.get_json(
'/federations/%s/detail' % federation['uuid'], expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_many(self):
federation_list = []
for id_ in range(5):
temp_uuid = uuidutils.generate_uuid()
federation = obj_utils.create_test_federation(
self.context, id=id_, uuid=temp_uuid)
federation_list.append(federation.uuid)
response = self.get_json('/federations')
self.assertEqual(len(federation_list), len(response['federations']))
uuids = [f['uuid'] for f in response['federations']]
self.assertEqual(sorted(federation_list), sorted(uuids))
def test_links(self):
uuid = uuidutils.generate_uuid()
obj_utils.create_test_federation(self.context, id=1, uuid=uuid)
response = self.get_json('/federations/%s' % uuid)
self.assertIn('links', response.keys())
self.assertEqual(2, len(response['links']))
self.assertIn(uuid, response['links'][0]['href'])
for link in response['links']:
bookmark = link['rel'] == 'bookmark'
self.assertTrue(self.validate_link(link['href'],
bookmark=bookmark))
def test_collection_links(self):
for id_ in range(5):
obj_utils.create_test_federation(self.context, id=id_,
uuid=uuidutils.generate_uuid())
response = self.get_json('/federations/?limit=3')
next_marker = response['federations'][-1]['uuid']
self.assertIn(next_marker, response['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
for id_ in range(5):
obj_utils.create_test_federation(self.context, id=id_,
uuid=uuidutils.generate_uuid())
response = self.get_json('/federations')
self.assertEqual(3, len(response['federations']))
next_marker = response['federations'][-1]['uuid']
self.assertIn(next_marker, response['next'])
class TestPatch(api_base.FunctionalTest):
def setUp(self):
super(TestPatch, self).setUp()
p = mock.patch.object(rpcapi.API, 'federation_update_async')
self.mock_federation_update = p.start()
self.mock_federation_update.side_effect = \
self._sim_rpc_federation_update
self.addCleanup(p.stop)
def _sim_rpc_federation_update(self, federation, rollback=False):
federation.save()
return federation
def test_member_join(self):
f = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[])
new_member = obj_utils.create_test_cluster(self.context)
response = self.patch_json(
'/federations/%s' % f.uuid,
[{'path': '/member_ids', 'value': new_member.uuid, 'op': 'add'}])
self.assertEqual(202, response.status_int)
# make sure it was added:
fed = self.get_json('/federations/%s' % f.uuid)
self.assertTrue(new_member.uuid in fed['member_ids'])
def test_member_unjoin(self):
member = obj_utils.create_test_cluster(self.context)
federation = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[member.uuid])
response = self.patch_json(
'/federations/%s' % federation.uuid,
[{'path': '/member_ids', 'value': member.uuid, 'op': 'remove'}])
self.assertEqual(202, response.status_int)
# make sure it was deleted:
fed = self.get_json('/federations/%s' % federation.uuid)
self.assertFalse(member.uuid in fed['member_ids'])
def test_join_non_existent_cluster(self):
foo_uuid = uuidutils.generate_uuid()
f = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[])
response = self.patch_json(
'/federations/%s' % f.uuid,
[{'path': '/member_ids', 'value': foo_uuid, 'op': 'add'}],
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_unjoin_non_existent_cluster(self):
foo_uuid = uuidutils.generate_uuid()
f = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[])
response = self.patch_json(
'/federations/%s' % f.uuid,
[{'path': '/member_ids', 'value': foo_uuid, 'op': 'remove'}],
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_join_cluster_already_member(self):
cluster = obj_utils.create_test_cluster(self.context)
f = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[cluster.uuid])
response = self.patch_json(
'/federations/%s' % f.uuid,
[{'path': '/member_ids', 'value': cluster.uuid, 'op': 'add'}],
expect_errors=True)
self.assertEqual(409, response.status_int)
def test_unjoin_non_member_cluster(self):
cluster = obj_utils.create_test_cluster(self.context)
f = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid(), member_ids=[])
response = self.patch_json(
'/federations/%s' % f.uuid,
[{'path': '/member_ids', 'value': cluster.uuid, 'op': 'remove'}],
expect_errors=True)
self.assertEqual(404, response.status_int)
class TestPost(api_base.FunctionalTest):
def setUp(self):
super(TestPost, self).setUp()
p = mock.patch.object(rpcapi.API, 'federation_create_async')
self.mock_fed_create = p.start()
self.mock_fed_create.side_effect = self._simulate_federation_create
self.addCleanup(p.stop)
self.hostcluster = obj_utils.create_test_cluster(self.context)
def _simulate_federation_create(self, federation, create_timeout):
federation.create()
return federation
@mock.patch('oslo_utils.timeutils.utcnow')
def test_create_federation(self, mock_utcnow):
bdict = apiutils.federation_post_data(
uuid=uuidutils.generate_uuid(),
hostcluster_id=self.hostcluster.uuid)
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.post_json('/federations', bdict)
self.assertEqual('application/json', response.content_type)
self.assertEqual(202, response.status_int)
self.assertTrue(uuidutils.is_uuid_like(response.json['uuid']))
def test_create_federation_no_hostcluster_id(self):
bdict = apiutils.federation_post_data(uuid=uuidutils.generate_uuid())
del bdict['hostcluster_id']
response = self.post_json('/federations', bdict, expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_create_federation_hostcluster_does_not_exist(self):
bdict = apiutils.federation_post_data(
uuid=uuidutils.generate_uuid(),
hostcluster_id=uuidutils.generate_uuid())
response = self.post_json('/federations', bdict, expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_create_federation_no_dns_zone_name(self):
bdict = apiutils.federation_post_data(
uuid=uuidutils.generate_uuid(),
hostcluster_id=self.hostcluster.uuid)
del bdict['properties']
response = self.post_json('/federations', bdict, expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_create_federation_generate_uuid(self):
bdict = apiutils.federation_post_data(
hostcluster_id=self.hostcluster.uuid)
del bdict['uuid']
response = self.post_json('/federations', bdict)
self.assertEqual(202, response.status_int)
def test_create_federation_with_invalid_name(self):
invalid_names = [
'x' * 243, '123456', '123456test_federation',
'-test_federation', '.test_federation', '_test_federation', ''
]
for value in invalid_names:
bdict = apiutils.federation_post_data(
uuid=uuidutils.generate_uuid(), name=value,
hostcluster_id=self.hostcluster.uuid)
response = self.post_json('/federations', bdict,
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_int)
self.assertTrue(response.json['errors'])
def test_create_federation_with_valid_name(self):
valid_names = [
'test_federation123456', 'test-federation', 'test.federation',
'testfederation.', 'testfederation-', 'testfederation_',
'test.-_federation', 'Testfederation'
]
for value in valid_names:
bdict = apiutils.federation_post_data(
name=value, hostcluster_id=self.hostcluster.uuid)
bdict['uuid'] = uuidutils.generate_uuid()
response = self.post_json('/federations', bdict)
self.assertEqual(202, response.status_int)
def test_create_federation_without_name(self):
bdict = apiutils.federation_post_data(
uuid=uuidutils.generate_uuid(),
hostcluster_id=self.hostcluster.uuid)
del bdict['name']
response = self.post_json('/federations', bdict)
self.assertEqual(202, response.status_int)
class TestDelete(api_base.FunctionalTest):
def setUp(self):
super(TestDelete, self).setUp()
self.federation = obj_utils.create_test_federation(
self.context, name='federation-example',
uuid=uuidutils.generate_uuid())
p = mock.patch.object(rpcapi.API, 'federation_delete_async')
self.mock_federation_delete = p.start()
self.mock_federation_delete.side_effect = \
self._simulate_federation_delete
self.addCleanup(p.stop)
def _simulate_federation_delete(self, federation_uuid):
federation = objects.Federation.get_by_uuid(self.context,
federation_uuid)
federation.destroy()
def test_delete_federation(self):
self.delete('/federations/%s' % self.federation.uuid)
response = self.get_json('/federations/%s' % self.federation.uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['errors'])
def test_delete_federation_not_found(self):
delete = self.delete('/federations/%s' % uuidutils.generate_uuid(),
expect_errors=True)
self.assertEqual(404, delete.status_int)
self.assertEqual('application/json', delete.content_type)
self.assertTrue(delete.json['errors'])
def test_delete_federation_with_name(self):
delete = self.delete('/federations/%s' % self.federation.name)
self.assertEqual(204, delete.status_int)
def test_delete_federation_with_name_not_found(self):
delete = self.delete('/federations/%s' % 'foo',
expect_errors=True)
self.assertEqual(404, delete.status_int)
self.assertEqual('application/json', delete.content_type)
self.assertTrue(delete.json['errors'])
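# Illustrative addition (not part of the original tests): the join/unjoin
# tests above drive the controller with RFC 6902 JSON-Patch documents; a
# standalone member-join request body looks like this (the UUID is a
# placeholder value).
EXAMPLE_MEMBER_JOIN_PATCH = [
    {'op': 'add', 'path': '/member_ids',
     'value': '11111111-2222-3333-4444-555555555555'},
]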
| apache-2.0 |
Llamatech/sis-fibo | model/queue_client/publisher.py | 1 | 13238 | # -*- coding: utf-8 -*-
import time
import pika
import json
import logging
import hashlib
class ExamplePublisher(object):
"""This is an example publisher that will handle unexpected interactions
with RabbitMQ such as channel and connection closures.
If RabbitMQ closes the connection, it will reopen it. You should
look at the output, as there are limited reasons why the connection may
be closed, which usually are tied to permission related issues or
socket timeouts.
It uses delivery confirmations and illustrates one way to keep track of
messages that have been sent and if they've been confirmed by RabbitMQ.
"""
EXCHANGE = 'transactions'
EXCHANGE_TYPE = 'topic'
PUBLISH_INTERVAL = 60
QUEUE = 'bancandes'
ROUTING_KEY = 'llamabank.requests'
def __init__(self, logger, amqp_url='amqp://llamabank:[email protected]:5672'):
"""Setup the example publisher object, passing in the URL we will use
to connect to RabbitMQ.
:param str amqp_url: The URL for connecting to RabbitMQ
"""
self._connection = None
self._channel = None
self._deliveries = []
self._acked = 0
self._nacked = 0
self._message_number = 0
self._stopping = False
self._url = amqp_url
self._closing = False
self.logger = logger
def connect(self):
"""This method connects to RabbitMQ, returning the connection handle.
When the connection is established, the on_connection_open method
will be invoked by pika. If you want the reconnection to work, make
sure you set stop_ioloop_on_close to False, which is not the default
behavior of this adapter.
:rtype: pika.SelectConnection
"""
self.logger.info('Connecting to %s', self._url)
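        # Note: the connection parameters below are hard-coded for this
        # deployment, so self._url is currently only used by the log message
        # above.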
cred = pika.PlainCredentials('llamabank', '123llama123')
param = pika.ConnectionParameters(
host='margffoy-tuay.com',
port=5672,
virtual_host='bancandesh',
credentials=cred
)
self._connection = pika.TornadoConnection(param,
self.on_connection_open,
stop_ioloop_on_close=False)
def on_connection_open(self, unused_connection):
"""This method is called by pika once the connection to RabbitMQ has
been established. It passes the handle to the connection object in
case we need it, but in this case, we'll just mark it unused.
:type unused_connection: pika.SelectConnection
"""
self.logger.info('Connection opened')
self.add_on_connection_close_callback()
self.open_channel()
def add_on_connection_close_callback(self):
"""This method adds an on close callback that will be invoked by pika
when RabbitMQ closes the connection to the publisher unexpectedly.
"""
self.logger.info('Adding connection close callback')
self._connection.add_on_close_callback(self.on_connection_closed)
def on_connection_closed(self, connection, reply_code, reply_text):
"""This method is invoked by pika when the connection to RabbitMQ is
closed unexpectedly. Since it is unexpected, we will reconnect to
RabbitMQ if it disconnects.
:param pika.connection.Connection connection: The closed connection obj
:param int reply_code: The server provided reply_code if given
:param str reply_text: The server provided reply_text if given
"""
self._channel = None
if self._closing:
self._connection.ioloop.stop()
else:
self.logger.warning('Connection closed, reopening in 5 seconds: (%s) %s',
reply_code, reply_text)
self._connection.add_timeout(5, self.reconnect)
def reconnect(self):
"""Will be invoked by the IOLoop timer if the connection is
closed. See the on_connection_closed method.
"""
self._deliveries = []
self._acked = 0
self._nacked = 0
self._message_number = 0
# This is the old connection IOLoop instance, stop its ioloop
# self._connection.ioloop.stop()
# Create a new connection
self.connect()
# There is now a new connection, needs a new ioloop to run
# self._connection.ioloop.start()
def open_channel(self):
"""This method will open a new channel with RabbitMQ by issuing the
Channel.Open RPC command. When RabbitMQ confirms the channel is open
by sending the Channel.OpenOK RPC reply, the on_channel_open method
will be invoked.
"""
self.logger.info('Creating a new channel')
self._connection.channel(on_open_callback=self.on_channel_open)
def on_channel_open(self, channel):
"""This method is invoked by pika when the channel has been opened.
The channel object is passed in so we can make use of it.
Since the channel is now open, we'll declare the exchange to use.
:param pika.channel.Channel channel: The channel object
"""
self.logger.info('Channel opened')
self._channel = channel
self.add_on_channel_close_callback()
self.setup_exchange(self.EXCHANGE)
def add_on_channel_close_callback(self):
"""This method tells pika to call the on_channel_closed method if
RabbitMQ unexpectedly closes the channel.
"""
self.logger.info('Adding channel close callback')
self._channel.add_on_close_callback(self.on_channel_closed)
def on_channel_closed(self, channel, reply_code, reply_text):
"""Invoked by pika when RabbitMQ unexpectedly closes the channel.
Channels are usually closed if you attempt to do something that
violates the protocol, such as re-declare an exchange or queue with
different parameters. In this case, we'll close the connection
to shutdown the object.
:param pika.channel.Channel: The closed channel
:param int reply_code: The numeric reason the channel was closed
:param str reply_text: The text reason the channel was closed
"""
self.logger.warning('Channel was closed: (%s) %s', reply_code, reply_text)
if not self._closing:
self._connection.close()
def setup_exchange(self, exchange_name):
"""Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC
command. When it is complete, the on_exchange_declareok method will
be invoked by pika.
:param str|unicode exchange_name: The name of the exchange to declare
"""
self.logger.info('Declaring exchange %s', exchange_name)
self._channel.exchange_declare(self.on_exchange_declareok,
exchange_name,
self.EXCHANGE_TYPE)
def on_exchange_declareok(self, unused_frame):
"""Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC
command.
:param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame
"""
self.logger.info('Exchange declared')
self.setup_queue(self.QUEUE)
def setup_queue(self, queue_name):
"""Setup the queue on RabbitMQ by invoking the Queue.Declare RPC
command. When it is complete, the on_queue_declareok method will
be invoked by pika.
:param str|unicode queue_name: The name of the queue to declare.
"""
self.logger.info('Declaring queue %s', queue_name)
self._channel.queue_declare(self.on_queue_declareok, queue_name)
def on_queue_declareok(self, method_frame):
"""Method invoked by pika when the Queue.Declare RPC call made in
setup_queue has completed. In this method we will bind the queue
and exchange together with the routing key by issuing the Queue.Bind
RPC command. When this command is complete, the on_bindok method will
be invoked by pika.
:param pika.frame.Method method_frame: The Queue.DeclareOk frame
"""
self.logger.info('Binding %s to %s with %s',
self.EXCHANGE, self.QUEUE, self.ROUTING_KEY)
self._channel.queue_bind(self.on_bindok, self.QUEUE,
self.EXCHANGE, self.ROUTING_KEY)
def on_bindok(self, unused_frame):
"""This method is invoked by pika when it receives the Queue.BindOk
response from RabbitMQ. Since we know we're now setup and bound, it's
time to start publishing."""
self.logger.info('Queue bound')
self.start_publishing()
def start_publishing(self):
"""This method will enable delivery confirmations and schedule the
first message to be sent to RabbitMQ
"""
self.logger.info('Issuing consumer related RPC commands')
self.enable_delivery_confirmations()
# self.schedule_next_message()
def enable_delivery_confirmations(self):
"""Send the Confirm.Select RPC method to RabbitMQ to enable delivery
confirmations on the channel. The only way to turn this off is to close
the channel and create a new one.
When the message is confirmed from RabbitMQ, the
on_delivery_confirmation method will be invoked passing in a Basic.Ack
or Basic.Nack method from RabbitMQ that will indicate which messages it
is confirming or rejecting.
"""
self.logger.info('Issuing Confirm.Select RPC command')
self._channel.confirm_delivery(self.on_delivery_confirmation)
def on_delivery_confirmation(self, method_frame):
"""Invoked by pika when RabbitMQ responds to a Basic.Publish RPC
command, passing in either a Basic.Ack or Basic.Nack frame with
the delivery tag of the message that was published. The delivery tag
is an integer counter indicating the message number that was sent
        on the channel via Basic.Publish. Here we're just doing housekeeping:
        tracking stats and removing confirmed message numbers from the list
        of deliveries still awaiting confirmation.
:param pika.frame.Method method_frame: Basic.Ack or Basic.Nack frame
"""
confirmation_type = method_frame.method.NAME.split('.')[1].lower()
self.logger.info('Received %s for delivery tag: %i',
confirmation_type,
method_frame.method.delivery_tag)
if confirmation_type == 'ack':
self._acked += 1
elif confirmation_type == 'nack':
self._nacked += 1
self._deliveries.remove(method_frame.method.delivery_tag)
self.logger.info('Published %i messages, %i have yet to be confirmed, '
'%i were acked and %i were nacked',
self._message_number, len(self._deliveries),
self._acked, self._nacked)
def schedule_next_message(self):
"""If we are not closing our connection to RabbitMQ, schedule another
message to be delivered in PUBLISH_INTERVAL seconds.
"""
if self._stopping:
return
self.logger.info('Scheduling next message for %0.1f seconds',
self.PUBLISH_INTERVAL)
self._connection.add_timeout(self.PUBLISH_INTERVAL,
self.publish_message)
def publish_message(self, message):
"""If the class is not stopping, publish a message to RabbitMQ,
appending a list of deliveries with the message number that was sent.
This list will be used to check for delivery confirmations in the
on_delivery_confirmations method.
Once the message has been sent, schedule another message to be sent.
The main reason I put scheduling in was just so you can get a good idea
of how the process is flowing by slowing down and speeding up the
delivery intervals by changing the PUBLISH_INTERVAL constant in the
class.
"""
if self._stopping:
return
        self.logger.info('Publishing message: %s', message)
properties = pika.BasicProperties(app_id='llamabank',
content_type='application/json',
headers=message)
self._channel.basic_publish(self.EXCHANGE, self.ROUTING_KEY,
json.dumps(message, ensure_ascii=False),
properties)
self._message_number += 1
self._deliveries.append(self._message_number)
self.logger.info('Published message # %i', self._message_number)
# self.schedule_next_message()
def close_channel(self):
"""Invoke this command to close the channel with RabbitMQ by sending
the Channel.Close RPC command.
"""
self.logger.info('Closing the channel')
if self._channel:
self._channel.close()
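# A minimal usage sketch (an illustrative addition, not part of the original
# module): pika's TornadoConnection is driven by the Tornado IOLoop, so the
# loop must be started after connect(); broker details come from the
# hard-coded parameters in connect().
if __name__ == '__main__':
    from tornado.ioloop import IOLoop
    logging.basicConfig(level=logging.INFO)
    publisher = ExamplePublisher(logging.getLogger('publisher'))
    publisher.connect()
    # Once the loop runs, the on_connection_open/on_channel_open callbacks
    # fire, and publish_message() may then be used to enqueue requests.
    IOLoop.instance().start()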
| gpl-2.0 |
adaxi/couchpotato | libs/subliminal/api.py | 106 | 5646 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
MATCHING_CONFIDENCE, create_list_tasks, consume_task, create_download_tasks,
group_by_video, key_subtitles)
from .language import language_set, language_list, LANGUAGES
import logging
__all__ = ['list_subtitles', 'download_subtitles']
logger = logging.getLogger(__name__)
def list_subtitles(paths, languages=None, services=None, force=True, multi=False, cache_dir=None, max_depth=3, scan_filter=None):
"""List subtitles in given paths according to the criteria
:param paths: path(s) to video file or folder
:type paths: string or list
:param languages: languages to search for, in preferred order
:type languages: list of :class:`~subliminal.language.Language` or string
:param list services: services to use for the search, in preferred order
:param bool force: force searching for subtitles even if some are detected
:param bool multi: search multiple languages for the same video
:param string cache_dir: path to the cache directory to use
:param int max_depth: maximum depth for scanning entries
:param function scan_filter: filter function that takes a path as argument and returns a boolean indicating whether it has to be filtered out (``True``) or not (``False``)
:return: found subtitles
:rtype: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.ResultSubtitle`]
"""
services = services or SERVICES
languages = language_set(languages) if languages is not None else language_set(LANGUAGES)
if isinstance(paths, basestring):
paths = [paths]
if any([not isinstance(p, unicode) for p in paths]):
logger.warning(u'Not all entries are unicode')
results = []
service_instances = {}
tasks = create_list_tasks(paths, languages, services, force, multi, cache_dir, max_depth, scan_filter)
for task in tasks:
try:
result = consume_task(task, service_instances)
results.append((task.video, result))
except:
logger.error(u'Error consuming task %r' % task, exc_info=True)
for service_instance in service_instances.itervalues():
service_instance.terminate()
return group_by_video(results)
def download_subtitles(paths, languages=None, services=None, force=True, multi=False, cache_dir=None, max_depth=3, scan_filter=None, order=None):
"""Download subtitles in given paths according to the criteria
:param paths: path(s) to video file or folder
:type paths: string or list
:param languages: languages to search for, in preferred order
:type languages: list of :class:`~subliminal.language.Language` or string
:param list services: services to use for the search, in preferred order
:param bool force: force searching for subtitles even if some are detected
:param bool multi: search multiple languages for the same video
:param string cache_dir: path to the cache directory to use
:param int max_depth: maximum depth for scanning entries
:param function scan_filter: filter function that takes a path as argument and returns a boolean indicating whether it has to be filtered out (``True``) or not (``False``)
:param order: preferred order for subtitles sorting
    :type order: list of :data:`~subliminal.core.LANGUAGE_INDEX`, :data:`~subliminal.core.SERVICE_INDEX`, :data:`~subliminal.core.SERVICE_CONFIDENCE`, :data:`~subliminal.core.MATCHING_CONFIDENCE`
:return: downloaded subtitles
:rtype: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.ResultSubtitle`]
.. note::
If you use ``multi=True``, :data:`~subliminal.core.LANGUAGE_INDEX` has to be the first item of the ``order`` list
or you might get unexpected results.
"""
services = services or SERVICES
languages = language_list(languages) if languages is not None else language_list(LANGUAGES)
if isinstance(paths, basestring):
paths = [paths]
order = order or [LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE, MATCHING_CONFIDENCE]
subtitles_by_video = list_subtitles(paths, languages, services, force, multi, cache_dir, max_depth, scan_filter)
for video, subtitles in subtitles_by_video.iteritems():
subtitles.sort(key=lambda s: key_subtitles(s, video, languages, services, order), reverse=True)
results = []
service_instances = {}
tasks = create_download_tasks(subtitles_by_video, languages, multi)
for task in tasks:
try:
result = consume_task(task, service_instances)
results.append((task.video, result))
except:
logger.error(u'Error consuming task %r' % task, exc_info=True)
for service_instance in service_instances.itervalues():
service_instance.terminate()
return group_by_video(results)
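# A minimal usage sketch (an illustrative addition, not part of the original
# module; the video path is a placeholder):
if __name__ == '__main__':
    found = download_subtitles([u'/tmp/The.Movie.2011.avi'],
                               languages=['en', 'fr'], multi=True)
    # Each video maps to the list of subtitles downloaded for it.
    for video, subtitles in found.iteritems():
        print video.path, [subtitle.language for subtitle in subtitles]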
| gpl-3.0 |
dashmoment/facerecognition | py/apps/videofacerec/helper/video.py | 3 | 4718 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import cv2
from time import clock
from numpy import pi, sin, cos
import common
class VideoSynthBase(object):
def __init__(self, size=None, noise=0.0, bg = None, **params):
self.bg = None
self.frame_size = (640, 480)
if bg is not None:
self.bg = cv2.imread(bg, 1)
h, w = self.bg.shape[:2]
self.frame_size = (w, h)
if size is not None:
w, h = map(int, size.split('x'))
self.frame_size = (w, h)
self.bg = cv2.resize(self.bg, self.frame_size)
self.noise = float(noise)
def render(self, dst):
pass
def read(self, dst=None):
w, h = self.frame_size
if self.bg is None:
buf = np.zeros((h, w, 3), np.uint8)
else:
buf = self.bg.copy()
self.render(buf)
if self.noise > 0.0:
noise = np.zeros((h, w, 3), np.int8)
cv2.randn(noise, np.zeros(3), np.ones(3)*255*self.noise)
buf = cv2.add(buf, noise, dtype=cv2.CV_8UC3)
return True, buf
class Chess(VideoSynthBase):
def __init__(self, **kw):
super(Chess, self).__init__(**kw)
w, h = self.frame_size
self.grid_size = sx, sy = 10, 7
white_quads = []
black_quads = []
for i, j in np.ndindex(sy, sx):
q = [[j, i, 0], [j+1, i, 0], [j+1, i+1, 0], [j, i+1, 0]]
[white_quads, black_quads][(i + j) % 2].append(q)
self.white_quads = np.float32(white_quads)
self.black_quads = np.float32(black_quads)
fx = 0.9
self.K = np.float64([[fx*w, 0, 0.5*(w-1)],
[0, fx*w, 0.5*(h-1)],
[0.0,0.0, 1.0]])
self.dist_coef = np.float64([-0.2, 0.1, 0, 0])
self.t = 0
def draw_quads(self, img, quads, color = (0, 255, 0)):
        img_quads = cv2.projectPoints(quads.reshape(-1, 3), self.rvec, self.tvec, self.K, self.dist_coef)[0]
img_quads.shape = quads.shape[:2] + (2,)
for q in img_quads:
cv2.fillConvexPoly(img, np.int32(q*4), color, cv2.CV_AA, shift=2)
def render(self, dst):
t = self.t
self.t += 1.0/30.0
sx, sy = self.grid_size
center = np.array([0.5*sx, 0.5*sy, 0.0])
phi = pi/3 + sin(t*3)*pi/8
c, s = cos(phi), sin(phi)
ofs = np.array([sin(1.2*t), cos(1.8*t), 0]) * sx * 0.2
eye_pos = center + np.array([cos(t)*c, sin(t)*c, s]) * 15.0 + ofs
target_pos = center + ofs
R, self.tvec = common.lookat(eye_pos, target_pos)
self.rvec = common.mtx2rvec(R)
self.draw_quads(dst, self.white_quads, (245, 245, 245))
self.draw_quads(dst, self.black_quads, (10, 10, 10))
classes = dict(chess=Chess)
def create_capture(source):
'''
source: <int> or '<int>' or '<filename>' or 'synth:<params>'
'''
try: source = int(source)
except ValueError: pass
else:
return cv2.VideoCapture(source)
source = str(source).strip()
if source.startswith('synth'):
ss = filter(None, source.split(':'))
params = dict( s.split('=') for s in ss[1:] )
try: Class = classes[params['class']]
        except KeyError: Class = VideoSynthBase
return Class(**params)
return cv2.VideoCapture(source)
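# Illustrative calls (added; the device index and file name are placeholders):
#   cap = create_capture(0)                                # first camera
#   cap = create_capture('video.avi')                      # video file
#   cap = create_capture('synth:class=chess:noise=0.05')   # synthetic scene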
presets = dict(
empty = 'synth:',
lena = 'synth:bg=../cpp/lena.jpg:noise=0.1',
chess = 'synth:class=chess:bg=../cpp/lena.jpg:noise=0.1:size=640x480'
)
if __name__ == '__main__':
import sys
import getopt
print 'USAGE: video.py [--shotdir <dir>] [source0] [source1] ...'
print "source: '<int>' or '<filename>' or 'synth:<params>'"
print
args, sources = getopt.getopt(sys.argv[1:], '', 'shotdir=')
args = dict(args)
shotdir = args.get('--shotdir', '.')
if len(sources) == 0:
sources = [ presets['chess'] ]
print 'Press SPACE to save current frame'
caps = map(create_capture, sources)
shot_idx = 0
while True:
imgs = []
for i, cap in enumerate(caps):
ret, img = cap.read()
imgs.append(img)
cv2.imshow('capture %d' % i, img)
ch = cv2.waitKey(1)
if ch == 27:
break
if ch == ord(' '):
for i, img in enumerate(imgs):
fn = '%s/shot_%d_%03d.bmp' % (shotdir, i, shot_idx)
cv2.imwrite(fn, img)
print fn, 'saved'
shot_idx += 1
| bsd-3-clause |
EntityFXCode/arsenalsuite | python/blur/examples/path_lister.py | 11 | 2216 | #!/usr/bin/python
from PyQt4.QtCore import *
from PyQt4.QtSql import *
from PyQt4.QtGui import *
from blur.Stone import *
from blur.Classes import *
from blur.Classesui import *
import blur.email, blur.jabber
import sys, time, re, os
from math import ceil
import traceback
try:
import popen2
except: pass
app = QApplication(sys.argv)
initConfig( "/etc/db.ini", "/var/log/path_lister.log" )
blur.RedirectOutputToLog()
blurqt_loader()
VERBOSE_DEBUG = False
if VERBOSE_DEBUG:
Database.instance().setEchoMode( Database.EchoUpdate | Database.EchoDelete )# | Database.EchoSelect )
FreezerCore.instance().reconnect()
class ProjectChooserDialog( QDialog ):
def __init__(self):
QDialog.__init__(self)
# Project Chooser Widget
self.projectCombo = ProjectCombo(self)
self.projectCombo.setShowSpecialItem(False)
# Ok | Cancel buttons
dbb = QDialogButtonBox( QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self )
self.connect( dbb, SIGNAL( 'accepted()' ), self.accept )
self.connect( dbb, SIGNAL( 'rejected()' ), self.reject )
# Layout
l = QVBoxLayout(self)
l.addWidget( self.projectCombo )
l.addWidget( dbb )
def project(self):
return self.projectCombo.project()
project = None
regenPaths = len(sys.argv) > 2 and sys.argv[2]
if len(sys.argv) > 1:
project = Project.recordByName( sys.argv[1] )
if not project or not project.isRecord():
d = ProjectChooserDialog()
if d.exec_() == QDialog.Accepted:
project = d.project()
if project.isRecord():
storageLocations = project.projectStorages()
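	# Walk the asset tree recursively, printing each child's template names
	# and its resolved path for every storage location; when regenPaths is
	# set, stale paths are rewritten to match the current template.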
def print_paths( asset, tabs = 0 ):
for c in asset.children():
print (' ' * tabs), c.name(), c.assetTemplate().name(), c.pathTemplate().name()
for storage in storageLocations:
path = c.path(storage)
pt = c.pathTracker(storage)
if not path.isEmpty() or pt.isRecord():
print (' ' * tabs) + ' ', path
gen_path = pt.generatePathFromTemplate(storage)
if path != gen_path:
if regenPaths:
print "Changing path to match template: ", gen_path
pt.setPath(gen_path)
pt.commit()
else:
print "Path doesnt match template: ", gen_path
print_paths( c, tabs + 1 )
print_paths( project )
| gpl-2.0 |
chubbymaggie/datasketch | examples/lshforest_example.py | 3 | 1141 | from datasketch import MinHashLSHForest, MinHash
data1 = ['minhash', 'is', 'a', 'probabilistic', 'data', 'structure', 'for',
'estimating', 'the', 'similarity', 'between', 'datasets']
data2 = ['minhash', 'is', 'a', 'probability', 'data', 'structure', 'for',
'estimating', 'the', 'similarity', 'between', 'documents']
data3 = ['minhash', 'is', 'probability', 'data', 'structure', 'for',
'estimating', 'the', 'similarity', 'between', 'documents']
# Create MinHash objects
m1 = MinHash(num_perm=128)
m2 = MinHash(num_perm=128)
m3 = MinHash(num_perm=128)
for d in data1:
m1.update(d.encode('utf8'))
for d in data2:
m2.update(d.encode('utf8'))
for d in data3:
m3.update(d.encode('utf8'))
forest = MinHashLSHForest(num_perm=128)
# Add m2 and m3 into the index
forest.add("m2", m2)
forest.add("m3", m3)
# IMPORTANT: must call index() otherwise the keys won't be searchable
forest.index()
# Check for membership using the key
print("m2" in forest)
print("m3" in forest)
# Using m1 as the query, retrieve the top 2 keys with the highest Jaccard similarity
result = forest.query(m1, 2)
print("Top 2 candidates", result)
| mit |
umaro/UmaroKernel | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
i = 0
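# Each configuration record is one packed little-endian 32-bit unsigned
# integer ("<I" in struct notation); read 4 bytes at a time and print each
# record as an "index=value" pair.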
while True:
buf = sys.stdin.read(4)
if len(buf) == 0:
break
elif len(buf) != 4:
sys.stdout.write("\n")
sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
sys.exit(1)
if i > 0:
sys.stdout.write(" ")
sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
i += 1
sys.stdout.write("\n")
| gpl-2.0 |
gauravmm/Remote-Temperature-Monitor | utilities/colormap/colormaps.py | 28 | 50518 | # New matplotlib colormaps by Nathaniel J. Smith, Stefan van der Walt,
# and (in the case of viridis) Eric Firing.
#
# This file and the colormaps in it are released under the CC0 license /
# public domain dedication. We would appreciate credit if you use or
# redistribute these colormaps, but do not impose any legal restrictions.
#
# To the extent possible under law, the persons who associated CC0 with
# mpl-colormaps have waived all copyright and related or neighboring rights
# to mpl-colormaps.
#
# You should have received a copy of the CC0 legalcode along with this
# work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
__all__ = ['magma', 'inferno', 'plasma', 'viridis']
_magma_data = [[0.001462, 0.000466, 0.013866],
[0.002258, 0.001295, 0.018331],
[0.003279, 0.002305, 0.023708],
[0.004512, 0.003490, 0.029965],
[0.005950, 0.004843, 0.037130],
[0.007588, 0.006356, 0.044973],
[0.009426, 0.008022, 0.052844],
[0.011465, 0.009828, 0.060750],
[0.013708, 0.011771, 0.068667],
[0.016156, 0.013840, 0.076603],
[0.018815, 0.016026, 0.084584],
[0.021692, 0.018320, 0.092610],
[0.024792, 0.020715, 0.100676],
[0.028123, 0.023201, 0.108787],
[0.031696, 0.025765, 0.116965],
[0.035520, 0.028397, 0.125209],
[0.039608, 0.031090, 0.133515],
[0.043830, 0.033830, 0.141886],
[0.048062, 0.036607, 0.150327],
[0.052320, 0.039407, 0.158841],
[0.056615, 0.042160, 0.167446],
[0.060949, 0.044794, 0.176129],
[0.065330, 0.047318, 0.184892],
[0.069764, 0.049726, 0.193735],
[0.074257, 0.052017, 0.202660],
[0.078815, 0.054184, 0.211667],
[0.083446, 0.056225, 0.220755],
[0.088155, 0.058133, 0.229922],
[0.092949, 0.059904, 0.239164],
[0.097833, 0.061531, 0.248477],
[0.102815, 0.063010, 0.257854],
[0.107899, 0.064335, 0.267289],
[0.113094, 0.065492, 0.276784],
[0.118405, 0.066479, 0.286321],
[0.123833, 0.067295, 0.295879],
[0.129380, 0.067935, 0.305443],
[0.135053, 0.068391, 0.315000],
[0.140858, 0.068654, 0.324538],
[0.146785, 0.068738, 0.334011],
[0.152839, 0.068637, 0.343404],
[0.159018, 0.068354, 0.352688],
[0.165308, 0.067911, 0.361816],
[0.171713, 0.067305, 0.370771],
[0.178212, 0.066576, 0.379497],
[0.184801, 0.065732, 0.387973],
[0.191460, 0.064818, 0.396152],
[0.198177, 0.063862, 0.404009],
[0.204935, 0.062907, 0.411514],
[0.211718, 0.061992, 0.418647],
[0.218512, 0.061158, 0.425392],
[0.225302, 0.060445, 0.431742],
[0.232077, 0.059889, 0.437695],
[0.238826, 0.059517, 0.443256],
[0.245543, 0.059352, 0.448436],
[0.252220, 0.059415, 0.453248],
[0.258857, 0.059706, 0.457710],
[0.265447, 0.060237, 0.461840],
[0.271994, 0.060994, 0.465660],
[0.278493, 0.061978, 0.469190],
[0.284951, 0.063168, 0.472451],
[0.291366, 0.064553, 0.475462],
[0.297740, 0.066117, 0.478243],
[0.304081, 0.067835, 0.480812],
[0.310382, 0.069702, 0.483186],
[0.316654, 0.071690, 0.485380],
[0.322899, 0.073782, 0.487408],
[0.329114, 0.075972, 0.489287],
[0.335308, 0.078236, 0.491024],
[0.341482, 0.080564, 0.492631],
[0.347636, 0.082946, 0.494121],
[0.353773, 0.085373, 0.495501],
[0.359898, 0.087831, 0.496778],
[0.366012, 0.090314, 0.497960],
[0.372116, 0.092816, 0.499053],
[0.378211, 0.095332, 0.500067],
[0.384299, 0.097855, 0.501002],
[0.390384, 0.100379, 0.501864],
[0.396467, 0.102902, 0.502658],
[0.402548, 0.105420, 0.503386],
[0.408629, 0.107930, 0.504052],
[0.414709, 0.110431, 0.504662],
[0.420791, 0.112920, 0.505215],
[0.426877, 0.115395, 0.505714],
[0.432967, 0.117855, 0.506160],
[0.439062, 0.120298, 0.506555],
[0.445163, 0.122724, 0.506901],
[0.451271, 0.125132, 0.507198],
[0.457386, 0.127522, 0.507448],
[0.463508, 0.129893, 0.507652],
[0.469640, 0.132245, 0.507809],
[0.475780, 0.134577, 0.507921],
[0.481929, 0.136891, 0.507989],
[0.488088, 0.139186, 0.508011],
[0.494258, 0.141462, 0.507988],
[0.500438, 0.143719, 0.507920],
[0.506629, 0.145958, 0.507806],
[0.512831, 0.148179, 0.507648],
[0.519045, 0.150383, 0.507443],
[0.525270, 0.152569, 0.507192],
[0.531507, 0.154739, 0.506895],
[0.537755, 0.156894, 0.506551],
[0.544015, 0.159033, 0.506159],
[0.550287, 0.161158, 0.505719],
[0.556571, 0.163269, 0.505230],
[0.562866, 0.165368, 0.504692],
[0.569172, 0.167454, 0.504105],
[0.575490, 0.169530, 0.503466],
[0.581819, 0.171596, 0.502777],
[0.588158, 0.173652, 0.502035],
[0.594508, 0.175701, 0.501241],
[0.600868, 0.177743, 0.500394],
[0.607238, 0.179779, 0.499492],
[0.613617, 0.181811, 0.498536],
[0.620005, 0.183840, 0.497524],
[0.626401, 0.185867, 0.496456],
[0.632805, 0.187893, 0.495332],
[0.639216, 0.189921, 0.494150],
[0.645633, 0.191952, 0.492910],
[0.652056, 0.193986, 0.491611],
[0.658483, 0.196027, 0.490253],
[0.664915, 0.198075, 0.488836],
[0.671349, 0.200133, 0.487358],
[0.677786, 0.202203, 0.485819],
[0.684224, 0.204286, 0.484219],
[0.690661, 0.206384, 0.482558],
[0.697098, 0.208501, 0.480835],
[0.703532, 0.210638, 0.479049],
[0.709962, 0.212797, 0.477201],
[0.716387, 0.214982, 0.475290],
[0.722805, 0.217194, 0.473316],
[0.729216, 0.219437, 0.471279],
[0.735616, 0.221713, 0.469180],
[0.742004, 0.224025, 0.467018],
[0.748378, 0.226377, 0.464794],
[0.754737, 0.228772, 0.462509],
[0.761077, 0.231214, 0.460162],
[0.767398, 0.233705, 0.457755],
[0.773695, 0.236249, 0.455289],
[0.779968, 0.238851, 0.452765],
[0.786212, 0.241514, 0.450184],
[0.792427, 0.244242, 0.447543],
[0.798608, 0.247040, 0.444848],
[0.804752, 0.249911, 0.442102],
[0.810855, 0.252861, 0.439305],
[0.816914, 0.255895, 0.436461],
[0.822926, 0.259016, 0.433573],
[0.828886, 0.262229, 0.430644],
[0.834791, 0.265540, 0.427671],
[0.840636, 0.268953, 0.424666],
[0.846416, 0.272473, 0.421631],
[0.852126, 0.276106, 0.418573],
[0.857763, 0.279857, 0.415496],
[0.863320, 0.283729, 0.412403],
[0.868793, 0.287728, 0.409303],
[0.874176, 0.291859, 0.406205],
[0.879464, 0.296125, 0.403118],
[0.884651, 0.300530, 0.400047],
[0.889731, 0.305079, 0.397002],
[0.894700, 0.309773, 0.393995],
[0.899552, 0.314616, 0.391037],
[0.904281, 0.319610, 0.388137],
[0.908884, 0.324755, 0.385308],
[0.913354, 0.330052, 0.382563],
[0.917689, 0.335500, 0.379915],
[0.921884, 0.341098, 0.377376],
[0.925937, 0.346844, 0.374959],
[0.929845, 0.352734, 0.372677],
[0.933606, 0.358764, 0.370541],
[0.937221, 0.364929, 0.368567],
[0.940687, 0.371224, 0.366762],
[0.944006, 0.377643, 0.365136],
[0.947180, 0.384178, 0.363701],
[0.950210, 0.390820, 0.362468],
[0.953099, 0.397563, 0.361438],
[0.955849, 0.404400, 0.360619],
[0.958464, 0.411324, 0.360014],
[0.960949, 0.418323, 0.359630],
[0.963310, 0.425390, 0.359469],
[0.965549, 0.432519, 0.359529],
[0.967671, 0.439703, 0.359810],
[0.969680, 0.446936, 0.360311],
[0.971582, 0.454210, 0.361030],
[0.973381, 0.461520, 0.361965],
[0.975082, 0.468861, 0.363111],
[0.976690, 0.476226, 0.364466],
[0.978210, 0.483612, 0.366025],
[0.979645, 0.491014, 0.367783],
[0.981000, 0.498428, 0.369734],
[0.982279, 0.505851, 0.371874],
[0.983485, 0.513280, 0.374198],
[0.984622, 0.520713, 0.376698],
[0.985693, 0.528148, 0.379371],
[0.986700, 0.535582, 0.382210],
[0.987646, 0.543015, 0.385210],
[0.988533, 0.550446, 0.388365],
[0.989363, 0.557873, 0.391671],
[0.990138, 0.565296, 0.395122],
[0.990871, 0.572706, 0.398714],
[0.991558, 0.580107, 0.402441],
[0.992196, 0.587502, 0.406299],
[0.992785, 0.594891, 0.410283],
[0.993326, 0.602275, 0.414390],
[0.993834, 0.609644, 0.418613],
[0.994309, 0.616999, 0.422950],
[0.994738, 0.624350, 0.427397],
[0.995122, 0.631696, 0.431951],
[0.995480, 0.639027, 0.436607],
[0.995810, 0.646344, 0.441361],
[0.996096, 0.653659, 0.446213],
[0.996341, 0.660969, 0.451160],
[0.996580, 0.668256, 0.456192],
[0.996775, 0.675541, 0.461314],
[0.996925, 0.682828, 0.466526],
[0.997077, 0.690088, 0.471811],
[0.997186, 0.697349, 0.477182],
[0.997254, 0.704611, 0.482635],
[0.997325, 0.711848, 0.488154],
[0.997351, 0.719089, 0.493755],
[0.997351, 0.726324, 0.499428],
[0.997341, 0.733545, 0.505167],
[0.997285, 0.740772, 0.510983],
[0.997228, 0.747981, 0.516859],
[0.997138, 0.755190, 0.522806],
[0.997019, 0.762398, 0.528821],
[0.996898, 0.769591, 0.534892],
[0.996727, 0.776795, 0.541039],
[0.996571, 0.783977, 0.547233],
[0.996369, 0.791167, 0.553499],
[0.996162, 0.798348, 0.559820],
[0.995932, 0.805527, 0.566202],
[0.995680, 0.812706, 0.572645],
[0.995424, 0.819875, 0.579140],
[0.995131, 0.827052, 0.585701],
[0.994851, 0.834213, 0.592307],
[0.994524, 0.841387, 0.598983],
[0.994222, 0.848540, 0.605696],
[0.993866, 0.855711, 0.612482],
[0.993545, 0.862859, 0.619299],
[0.993170, 0.870024, 0.626189],
[0.992831, 0.877168, 0.633109],
[0.992440, 0.884330, 0.640099],
[0.992089, 0.891470, 0.647116],
[0.991688, 0.898627, 0.654202],
[0.991332, 0.905763, 0.661309],
[0.990930, 0.912915, 0.668481],
[0.990570, 0.920049, 0.675675],
[0.990175, 0.927196, 0.682926],
[0.989815, 0.934329, 0.690198],
[0.989434, 0.941470, 0.697519],
[0.989077, 0.948604, 0.704863],
[0.988717, 0.955742, 0.712242],
[0.988367, 0.962878, 0.719649],
[0.988033, 0.970012, 0.727077],
[0.987691, 0.977154, 0.734536],
[0.987387, 0.984288, 0.742002],
[0.987053, 0.991438, 0.749504]]
_inferno_data = [[0.001462, 0.000466, 0.013866],
[0.002267, 0.001270, 0.018570],
[0.003299, 0.002249, 0.024239],
[0.004547, 0.003392, 0.030909],
[0.006006, 0.004692, 0.038558],
[0.007676, 0.006136, 0.046836],
[0.009561, 0.007713, 0.055143],
[0.011663, 0.009417, 0.063460],
[0.013995, 0.011225, 0.071862],
[0.016561, 0.013136, 0.080282],
[0.019373, 0.015133, 0.088767],
[0.022447, 0.017199, 0.097327],
[0.025793, 0.019331, 0.105930],
[0.029432, 0.021503, 0.114621],
[0.033385, 0.023702, 0.123397],
[0.037668, 0.025921, 0.132232],
[0.042253, 0.028139, 0.141141],
[0.046915, 0.030324, 0.150164],
[0.051644, 0.032474, 0.159254],
[0.056449, 0.034569, 0.168414],
[0.061340, 0.036590, 0.177642],
[0.066331, 0.038504, 0.186962],
[0.071429, 0.040294, 0.196354],
[0.076637, 0.041905, 0.205799],
[0.081962, 0.043328, 0.215289],
[0.087411, 0.044556, 0.224813],
[0.092990, 0.045583, 0.234358],
[0.098702, 0.046402, 0.243904],
[0.104551, 0.047008, 0.253430],
[0.110536, 0.047399, 0.262912],
[0.116656, 0.047574, 0.272321],
[0.122908, 0.047536, 0.281624],
[0.129285, 0.047293, 0.290788],
[0.135778, 0.046856, 0.299776],
[0.142378, 0.046242, 0.308553],
[0.149073, 0.045468, 0.317085],
[0.155850, 0.044559, 0.325338],
[0.162689, 0.043554, 0.333277],
[0.169575, 0.042489, 0.340874],
[0.176493, 0.041402, 0.348111],
[0.183429, 0.040329, 0.354971],
[0.190367, 0.039309, 0.361447],
[0.197297, 0.038400, 0.367535],
[0.204209, 0.037632, 0.373238],
[0.211095, 0.037030, 0.378563],
[0.217949, 0.036615, 0.383522],
[0.224763, 0.036405, 0.388129],
[0.231538, 0.036405, 0.392400],
[0.238273, 0.036621, 0.396353],
[0.244967, 0.037055, 0.400007],
[0.251620, 0.037705, 0.403378],
[0.258234, 0.038571, 0.406485],
[0.264810, 0.039647, 0.409345],
[0.271347, 0.040922, 0.411976],
[0.277850, 0.042353, 0.414392],
[0.284321, 0.043933, 0.416608],
[0.290763, 0.045644, 0.418637],
[0.297178, 0.047470, 0.420491],
[0.303568, 0.049396, 0.422182],
[0.309935, 0.051407, 0.423721],
[0.316282, 0.053490, 0.425116],
[0.322610, 0.055634, 0.426377],
[0.328921, 0.057827, 0.427511],
[0.335217, 0.060060, 0.428524],
[0.341500, 0.062325, 0.429425],
[0.347771, 0.064616, 0.430217],
[0.354032, 0.066925, 0.430906],
[0.360284, 0.069247, 0.431497],
[0.366529, 0.071579, 0.431994],
[0.372768, 0.073915, 0.432400],
[0.379001, 0.076253, 0.432719],
[0.385228, 0.078591, 0.432955],
[0.391453, 0.080927, 0.433109],
[0.397674, 0.083257, 0.433183],
[0.403894, 0.085580, 0.433179],
[0.410113, 0.087896, 0.433098],
[0.416331, 0.090203, 0.432943],
[0.422549, 0.092501, 0.432714],
[0.428768, 0.094790, 0.432412],
[0.434987, 0.097069, 0.432039],
[0.441207, 0.099338, 0.431594],
[0.447428, 0.101597, 0.431080],
[0.453651, 0.103848, 0.430498],
[0.459875, 0.106089, 0.429846],
[0.466100, 0.108322, 0.429125],
[0.472328, 0.110547, 0.428334],
[0.478558, 0.112764, 0.427475],
[0.484789, 0.114974, 0.426548],
[0.491022, 0.117179, 0.425552],
[0.497257, 0.119379, 0.424488],
[0.503493, 0.121575, 0.423356],
[0.509730, 0.123769, 0.422156],
[0.515967, 0.125960, 0.420887],
[0.522206, 0.128150, 0.419549],
[0.528444, 0.130341, 0.418142],
[0.534683, 0.132534, 0.416667],
[0.540920, 0.134729, 0.415123],
[0.547157, 0.136929, 0.413511],
[0.553392, 0.139134, 0.411829],
[0.559624, 0.141346, 0.410078],
[0.565854, 0.143567, 0.408258],
[0.572081, 0.145797, 0.406369],
[0.578304, 0.148039, 0.404411],
[0.584521, 0.150294, 0.402385],
[0.590734, 0.152563, 0.400290],
[0.596940, 0.154848, 0.398125],
[0.603139, 0.157151, 0.395891],
[0.609330, 0.159474, 0.393589],
[0.615513, 0.161817, 0.391219],
[0.621685, 0.164184, 0.388781],
[0.627847, 0.166575, 0.386276],
[0.633998, 0.168992, 0.383704],
[0.640135, 0.171438, 0.381065],
[0.646260, 0.173914, 0.378359],
[0.652369, 0.176421, 0.375586],
[0.658463, 0.178962, 0.372748],
[0.664540, 0.181539, 0.369846],
[0.670599, 0.184153, 0.366879],
[0.676638, 0.186807, 0.363849],
[0.682656, 0.189501, 0.360757],
[0.688653, 0.192239, 0.357603],
[0.694627, 0.195021, 0.354388],
[0.700576, 0.197851, 0.351113],
[0.706500, 0.200728, 0.347777],
[0.712396, 0.203656, 0.344383],
[0.718264, 0.206636, 0.340931],
[0.724103, 0.209670, 0.337424],
[0.729909, 0.212759, 0.333861],
[0.735683, 0.215906, 0.330245],
[0.741423, 0.219112, 0.326576],
[0.747127, 0.222378, 0.322856],
[0.752794, 0.225706, 0.319085],
[0.758422, 0.229097, 0.315266],
[0.764010, 0.232554, 0.311399],
[0.769556, 0.236077, 0.307485],
[0.775059, 0.239667, 0.303526],
[0.780517, 0.243327, 0.299523],
[0.785929, 0.247056, 0.295477],
[0.791293, 0.250856, 0.291390],
[0.796607, 0.254728, 0.287264],
[0.801871, 0.258674, 0.283099],
[0.807082, 0.262692, 0.278898],
[0.812239, 0.266786, 0.274661],
[0.817341, 0.270954, 0.270390],
[0.822386, 0.275197, 0.266085],
[0.827372, 0.279517, 0.261750],
[0.832299, 0.283913, 0.257383],
[0.837165, 0.288385, 0.252988],
[0.841969, 0.292933, 0.248564],
[0.846709, 0.297559, 0.244113],
[0.851384, 0.302260, 0.239636],
[0.855992, 0.307038, 0.235133],
[0.860533, 0.311892, 0.230606],
[0.865006, 0.316822, 0.226055],
[0.869409, 0.321827, 0.221482],
[0.873741, 0.326906, 0.216886],
[0.878001, 0.332060, 0.212268],
[0.882188, 0.337287, 0.207628],
[0.886302, 0.342586, 0.202968],
[0.890341, 0.347957, 0.198286],
[0.894305, 0.353399, 0.193584],
[0.898192, 0.358911, 0.188860],
[0.902003, 0.364492, 0.184116],
[0.905735, 0.370140, 0.179350],
[0.909390, 0.375856, 0.174563],
[0.912966, 0.381636, 0.169755],
[0.916462, 0.387481, 0.164924],
[0.919879, 0.393389, 0.160070],
[0.923215, 0.399359, 0.155193],
[0.926470, 0.405389, 0.150292],
[0.929644, 0.411479, 0.145367],
[0.932737, 0.417627, 0.140417],
[0.935747, 0.423831, 0.135440],
[0.938675, 0.430091, 0.130438],
[0.941521, 0.436405, 0.125409],
[0.944285, 0.442772, 0.120354],
[0.946965, 0.449191, 0.115272],
[0.949562, 0.455660, 0.110164],
[0.952075, 0.462178, 0.105031],
[0.954506, 0.468744, 0.099874],
[0.956852, 0.475356, 0.094695],
[0.959114, 0.482014, 0.089499],
[0.961293, 0.488716, 0.084289],
[0.963387, 0.495462, 0.079073],
[0.965397, 0.502249, 0.073859],
[0.967322, 0.509078, 0.068659],
[0.969163, 0.515946, 0.063488],
[0.970919, 0.522853, 0.058367],
[0.972590, 0.529798, 0.053324],
[0.974176, 0.536780, 0.048392],
[0.975677, 0.543798, 0.043618],
[0.977092, 0.550850, 0.039050],
[0.978422, 0.557937, 0.034931],
[0.979666, 0.565057, 0.031409],
[0.980824, 0.572209, 0.028508],
[0.981895, 0.579392, 0.026250],
[0.982881, 0.586606, 0.024661],
[0.983779, 0.593849, 0.023770],
[0.984591, 0.601122, 0.023606],
[0.985315, 0.608422, 0.024202],
[0.985952, 0.615750, 0.025592],
[0.986502, 0.623105, 0.027814],
[0.986964, 0.630485, 0.030908],
[0.987337, 0.637890, 0.034916],
[0.987622, 0.645320, 0.039886],
[0.987819, 0.652773, 0.045581],
[0.987926, 0.660250, 0.051750],
[0.987945, 0.667748, 0.058329],
[0.987874, 0.675267, 0.065257],
[0.987714, 0.682807, 0.072489],
[0.987464, 0.690366, 0.079990],
[0.987124, 0.697944, 0.087731],
[0.986694, 0.705540, 0.095694],
[0.986175, 0.713153, 0.103863],
[0.985566, 0.720782, 0.112229],
[0.984865, 0.728427, 0.120785],
[0.984075, 0.736087, 0.129527],
[0.983196, 0.743758, 0.138453],
[0.982228, 0.751442, 0.147565],
[0.981173, 0.759135, 0.156863],
[0.980032, 0.766837, 0.166353],
[0.978806, 0.774545, 0.176037],
[0.977497, 0.782258, 0.185923],
[0.976108, 0.789974, 0.196018],
[0.974638, 0.797692, 0.206332],
[0.973088, 0.805409, 0.216877],
[0.971468, 0.813122, 0.227658],
[0.969783, 0.820825, 0.238686],
[0.968041, 0.828515, 0.249972],
[0.966243, 0.836191, 0.261534],
[0.964394, 0.843848, 0.273391],
[0.962517, 0.851476, 0.285546],
[0.960626, 0.859069, 0.298010],
[0.958720, 0.866624, 0.310820],
[0.956834, 0.874129, 0.323974],
[0.954997, 0.881569, 0.337475],
[0.953215, 0.888942, 0.351369],
[0.951546, 0.896226, 0.365627],
[0.950018, 0.903409, 0.380271],
[0.948683, 0.910473, 0.395289],
[0.947594, 0.917399, 0.410665],
[0.946809, 0.924168, 0.426373],
[0.946392, 0.930761, 0.442367],
[0.946403, 0.937159, 0.458592],
[0.946903, 0.943348, 0.474970],
[0.947937, 0.949318, 0.491426],
[0.949545, 0.955063, 0.507860],
[0.951740, 0.960587, 0.524203],
[0.954529, 0.965896, 0.540361],
[0.957896, 0.971003, 0.556275],
[0.961812, 0.975924, 0.571925],
[0.966249, 0.980678, 0.587206],
[0.971162, 0.985282, 0.602154],
[0.976511, 0.989753, 0.616760],
[0.982257, 0.994109, 0.631017],
[0.988362, 0.998364, 0.644924]]
_plasma_data = [[0.050383, 0.029803, 0.527975],
[0.063536, 0.028426, 0.533124],
[0.075353, 0.027206, 0.538007],
[0.086222, 0.026125, 0.542658],
[0.096379, 0.025165, 0.547103],
[0.105980, 0.024309, 0.551368],
[0.115124, 0.023556, 0.555468],
[0.123903, 0.022878, 0.559423],
[0.132381, 0.022258, 0.563250],
[0.140603, 0.021687, 0.566959],
[0.148607, 0.021154, 0.570562],
[0.156421, 0.020651, 0.574065],
[0.164070, 0.020171, 0.577478],
[0.171574, 0.019706, 0.580806],
[0.178950, 0.019252, 0.584054],
[0.186213, 0.018803, 0.587228],
[0.193374, 0.018354, 0.590330],
[0.200445, 0.017902, 0.593364],
[0.207435, 0.017442, 0.596333],
[0.214350, 0.016973, 0.599239],
[0.221197, 0.016497, 0.602083],
[0.227983, 0.016007, 0.604867],
[0.234715, 0.015502, 0.607592],
[0.241396, 0.014979, 0.610259],
[0.248032, 0.014439, 0.612868],
[0.254627, 0.013882, 0.615419],
[0.261183, 0.013308, 0.617911],
[0.267703, 0.012716, 0.620346],
[0.274191, 0.012109, 0.622722],
[0.280648, 0.011488, 0.625038],
[0.287076, 0.010855, 0.627295],
[0.293478, 0.010213, 0.629490],
[0.299855, 0.009561, 0.631624],
[0.306210, 0.008902, 0.633694],
[0.312543, 0.008239, 0.635700],
[0.318856, 0.007576, 0.637640],
[0.325150, 0.006915, 0.639512],
[0.331426, 0.006261, 0.641316],
[0.337683, 0.005618, 0.643049],
[0.343925, 0.004991, 0.644710],
[0.350150, 0.004382, 0.646298],
[0.356359, 0.003798, 0.647810],
[0.362553, 0.003243, 0.649245],
[0.368733, 0.002724, 0.650601],
[0.374897, 0.002245, 0.651876],
[0.381047, 0.001814, 0.653068],
[0.387183, 0.001434, 0.654177],
[0.393304, 0.001114, 0.655199],
[0.399411, 0.000859, 0.656133],
[0.405503, 0.000678, 0.656977],
[0.411580, 0.000577, 0.657730],
[0.417642, 0.000564, 0.658390],
[0.423689, 0.000646, 0.658956],
[0.429719, 0.000831, 0.659425],
[0.435734, 0.001127, 0.659797],
[0.441732, 0.001540, 0.660069],
[0.447714, 0.002080, 0.660240],
[0.453677, 0.002755, 0.660310],
[0.459623, 0.003574, 0.660277],
[0.465550, 0.004545, 0.660139],
[0.471457, 0.005678, 0.659897],
[0.477344, 0.006980, 0.659549],
[0.483210, 0.008460, 0.659095],
[0.489055, 0.010127, 0.658534],
[0.494877, 0.011990, 0.657865],
[0.500678, 0.014055, 0.657088],
[0.506454, 0.016333, 0.656202],
[0.512206, 0.018833, 0.655209],
[0.517933, 0.021563, 0.654109],
[0.523633, 0.024532, 0.652901],
[0.529306, 0.027747, 0.651586],
[0.534952, 0.031217, 0.650165],
[0.540570, 0.034950, 0.648640],
[0.546157, 0.038954, 0.647010],
[0.551715, 0.043136, 0.645277],
[0.557243, 0.047331, 0.643443],
[0.562738, 0.051545, 0.641509],
[0.568201, 0.055778, 0.639477],
[0.573632, 0.060028, 0.637349],
[0.579029, 0.064296, 0.635126],
[0.584391, 0.068579, 0.632812],
[0.589719, 0.072878, 0.630408],
[0.595011, 0.077190, 0.627917],
[0.600266, 0.081516, 0.625342],
[0.605485, 0.085854, 0.622686],
[0.610667, 0.090204, 0.619951],
[0.615812, 0.094564, 0.617140],
[0.620919, 0.098934, 0.614257],
[0.625987, 0.103312, 0.611305],
[0.631017, 0.107699, 0.608287],
[0.636008, 0.112092, 0.605205],
[0.640959, 0.116492, 0.602065],
[0.645872, 0.120898, 0.598867],
[0.650746, 0.125309, 0.595617],
[0.655580, 0.129725, 0.592317],
[0.660374, 0.134144, 0.588971],
[0.665129, 0.138566, 0.585582],
[0.669845, 0.142992, 0.582154],
[0.674522, 0.147419, 0.578688],
[0.679160, 0.151848, 0.575189],
[0.683758, 0.156278, 0.571660],
[0.688318, 0.160709, 0.568103],
[0.692840, 0.165141, 0.564522],
[0.697324, 0.169573, 0.560919],
[0.701769, 0.174005, 0.557296],
[0.706178, 0.178437, 0.553657],
[0.710549, 0.182868, 0.550004],
[0.714883, 0.187299, 0.546338],
[0.719181, 0.191729, 0.542663],
[0.723444, 0.196158, 0.538981],
[0.727670, 0.200586, 0.535293],
[0.731862, 0.205013, 0.531601],
[0.736019, 0.209439, 0.527908],
[0.740143, 0.213864, 0.524216],
[0.744232, 0.218288, 0.520524],
[0.748289, 0.222711, 0.516834],
[0.752312, 0.227133, 0.513149],
[0.756304, 0.231555, 0.509468],
[0.760264, 0.235976, 0.505794],
[0.764193, 0.240396, 0.502126],
[0.768090, 0.244817, 0.498465],
[0.771958, 0.249237, 0.494813],
[0.775796, 0.253658, 0.491171],
[0.779604, 0.258078, 0.487539],
[0.783383, 0.262500, 0.483918],
[0.787133, 0.266922, 0.480307],
[0.790855, 0.271345, 0.476706],
[0.794549, 0.275770, 0.473117],
[0.798216, 0.280197, 0.469538],
[0.801855, 0.284626, 0.465971],
[0.805467, 0.289057, 0.462415],
[0.809052, 0.293491, 0.458870],
[0.812612, 0.297928, 0.455338],
[0.816144, 0.302368, 0.451816],
[0.819651, 0.306812, 0.448306],
[0.823132, 0.311261, 0.444806],
[0.826588, 0.315714, 0.441316],
[0.830018, 0.320172, 0.437836],
[0.833422, 0.324635, 0.434366],
[0.836801, 0.329105, 0.430905],
[0.840155, 0.333580, 0.427455],
[0.843484, 0.338062, 0.424013],
[0.846788, 0.342551, 0.420579],
[0.850066, 0.347048, 0.417153],
[0.853319, 0.351553, 0.413734],
[0.856547, 0.356066, 0.410322],
[0.859750, 0.360588, 0.406917],
[0.862927, 0.365119, 0.403519],
[0.866078, 0.369660, 0.400126],
[0.869203, 0.374212, 0.396738],
[0.872303, 0.378774, 0.393355],
[0.875376, 0.383347, 0.389976],
[0.878423, 0.387932, 0.386600],
[0.881443, 0.392529, 0.383229],
[0.884436, 0.397139, 0.379860],
[0.887402, 0.401762, 0.376494],
[0.890340, 0.406398, 0.373130],
[0.893250, 0.411048, 0.369768],
[0.896131, 0.415712, 0.366407],
[0.898984, 0.420392, 0.363047],
[0.901807, 0.425087, 0.359688],
[0.904601, 0.429797, 0.356329],
[0.907365, 0.434524, 0.352970],
[0.910098, 0.439268, 0.349610],
[0.912800, 0.444029, 0.346251],
[0.915471, 0.448807, 0.342890],
[0.918109, 0.453603, 0.339529],
[0.920714, 0.458417, 0.336166],
[0.923287, 0.463251, 0.332801],
[0.925825, 0.468103, 0.329435],
[0.928329, 0.472975, 0.326067],
[0.930798, 0.477867, 0.322697],
[0.933232, 0.482780, 0.319325],
[0.935630, 0.487712, 0.315952],
[0.937990, 0.492667, 0.312575],
[0.940313, 0.497642, 0.309197],
[0.942598, 0.502639, 0.305816],
[0.944844, 0.507658, 0.302433],
[0.947051, 0.512699, 0.299049],
[0.949217, 0.517763, 0.295662],
[0.951344, 0.522850, 0.292275],
[0.953428, 0.527960, 0.288883],
[0.955470, 0.533093, 0.285490],
[0.957469, 0.538250, 0.282096],
[0.959424, 0.543431, 0.278701],
[0.961336, 0.548636, 0.275305],
[0.963203, 0.553865, 0.271909],
[0.965024, 0.559118, 0.268513],
[0.966798, 0.564396, 0.265118],
[0.968526, 0.569700, 0.261721],
[0.970205, 0.575028, 0.258325],
[0.971835, 0.580382, 0.254931],
[0.973416, 0.585761, 0.251540],
[0.974947, 0.591165, 0.248151],
[0.976428, 0.596595, 0.244767],
[0.977856, 0.602051, 0.241387],
[0.979233, 0.607532, 0.238013],
[0.980556, 0.613039, 0.234646],
[0.981826, 0.618572, 0.231287],
[0.983041, 0.624131, 0.227937],
[0.984199, 0.629718, 0.224595],
[0.985301, 0.635330, 0.221265],
[0.986345, 0.640969, 0.217948],
[0.987332, 0.646633, 0.214648],
[0.988260, 0.652325, 0.211364],
[0.989128, 0.658043, 0.208100],
[0.989935, 0.663787, 0.204859],
[0.990681, 0.669558, 0.201642],
[0.991365, 0.675355, 0.198453],
[0.991985, 0.681179, 0.195295],
[0.992541, 0.687030, 0.192170],
[0.993032, 0.692907, 0.189084],
[0.993456, 0.698810, 0.186041],
[0.993814, 0.704741, 0.183043],
[0.994103, 0.710698, 0.180097],
[0.994324, 0.716681, 0.177208],
[0.994474, 0.722691, 0.174381],
[0.994553, 0.728728, 0.171622],
[0.994561, 0.734791, 0.168938],
[0.994495, 0.740880, 0.166335],
[0.994355, 0.746995, 0.163821],
[0.994141, 0.753137, 0.161404],
[0.993851, 0.759304, 0.159092],
[0.993482, 0.765499, 0.156891],
[0.993033, 0.771720, 0.154808],
[0.992505, 0.777967, 0.152855],
[0.991897, 0.784239, 0.151042],
[0.991209, 0.790537, 0.149377],
[0.990439, 0.796859, 0.147870],
[0.989587, 0.803205, 0.146529],
[0.988648, 0.809579, 0.145357],
[0.987621, 0.815978, 0.144363],
[0.986509, 0.822401, 0.143557],
[0.985314, 0.828846, 0.142945],
[0.984031, 0.835315, 0.142528],
[0.982653, 0.841812, 0.142303],
[0.981190, 0.848329, 0.142279],
[0.979644, 0.854866, 0.142453],
[0.977995, 0.861432, 0.142808],
[0.976265, 0.868016, 0.143351],
[0.974443, 0.874622, 0.144061],
[0.972530, 0.881250, 0.144923],
[0.970533, 0.887896, 0.145919],
[0.968443, 0.894564, 0.147014],
[0.966271, 0.901249, 0.148180],
[0.964021, 0.907950, 0.149370],
[0.961681, 0.914672, 0.150520],
[0.959276, 0.921407, 0.151566],
[0.956808, 0.928152, 0.152409],
[0.954287, 0.934908, 0.152921],
[0.951726, 0.941671, 0.152925],
[0.949151, 0.948435, 0.152178],
[0.946602, 0.955190, 0.150328],
[0.944152, 0.961916, 0.146861],
[0.941896, 0.968590, 0.140956],
[0.940015, 0.975158, 0.131326]]
_viridis_data = [[0.267004, 0.004874, 0.329415],
[0.268510, 0.009605, 0.335427],
[0.269944, 0.014625, 0.341379],
[0.271305, 0.019942, 0.347269],
[0.272594, 0.025563, 0.353093],
[0.273809, 0.031497, 0.358853],
[0.274952, 0.037752, 0.364543],
[0.276022, 0.044167, 0.370164],
[0.277018, 0.050344, 0.375715],
[0.277941, 0.056324, 0.381191],
[0.278791, 0.062145, 0.386592],
[0.279566, 0.067836, 0.391917],
[0.280267, 0.073417, 0.397163],
[0.280894, 0.078907, 0.402329],
[0.281446, 0.084320, 0.407414],
[0.281924, 0.089666, 0.412415],
[0.282327, 0.094955, 0.417331],
[0.282656, 0.100196, 0.422160],
[0.282910, 0.105393, 0.426902],
[0.283091, 0.110553, 0.431554],
[0.283197, 0.115680, 0.436115],
[0.283229, 0.120777, 0.440584],
[0.283187, 0.125848, 0.444960],
[0.283072, 0.130895, 0.449241],
[0.282884, 0.135920, 0.453427],
[0.282623, 0.140926, 0.457517],
[0.282290, 0.145912, 0.461510],
[0.281887, 0.150881, 0.465405],
[0.281412, 0.155834, 0.469201],
[0.280868, 0.160771, 0.472899],
[0.280255, 0.165693, 0.476498],
[0.279574, 0.170599, 0.479997],
[0.278826, 0.175490, 0.483397],
[0.278012, 0.180367, 0.486697],
[0.277134, 0.185228, 0.489898],
[0.276194, 0.190074, 0.493001],
[0.275191, 0.194905, 0.496005],
[0.274128, 0.199721, 0.498911],
[0.273006, 0.204520, 0.501721],
[0.271828, 0.209303, 0.504434],
[0.270595, 0.214069, 0.507052],
[0.269308, 0.218818, 0.509577],
[0.267968, 0.223549, 0.512008],
[0.266580, 0.228262, 0.514349],
[0.265145, 0.232956, 0.516599],
[0.263663, 0.237631, 0.518762],
[0.262138, 0.242286, 0.520837],
[0.260571, 0.246922, 0.522828],
[0.258965, 0.251537, 0.524736],
[0.257322, 0.256130, 0.526563],
[0.255645, 0.260703, 0.528312],
[0.253935, 0.265254, 0.529983],
[0.252194, 0.269783, 0.531579],
[0.250425, 0.274290, 0.533103],
[0.248629, 0.278775, 0.534556],
[0.246811, 0.283237, 0.535941],
[0.244972, 0.287675, 0.537260],
[0.243113, 0.292092, 0.538516],
[0.241237, 0.296485, 0.539709],
[0.239346, 0.300855, 0.540844],
[0.237441, 0.305202, 0.541921],
[0.235526, 0.309527, 0.542944],
[0.233603, 0.313828, 0.543914],
[0.231674, 0.318106, 0.544834],
[0.229739, 0.322361, 0.545706],
[0.227802, 0.326594, 0.546532],
[0.225863, 0.330805, 0.547314],
[0.223925, 0.334994, 0.548053],
[0.221989, 0.339161, 0.548752],
[0.220057, 0.343307, 0.549413],
[0.218130, 0.347432, 0.550038],
[0.216210, 0.351535, 0.550627],
[0.214298, 0.355619, 0.551184],
[0.212395, 0.359683, 0.551710],
[0.210503, 0.363727, 0.552206],
[0.208623, 0.367752, 0.552675],
[0.206756, 0.371758, 0.553117],
[0.204903, 0.375746, 0.553533],
[0.203063, 0.379716, 0.553925],
[0.201239, 0.383670, 0.554294],
[0.199430, 0.387607, 0.554642],
[0.197636, 0.391528, 0.554969],
[0.195860, 0.395433, 0.555276],
[0.194100, 0.399323, 0.555565],
[0.192357, 0.403199, 0.555836],
[0.190631, 0.407061, 0.556089],
[0.188923, 0.410910, 0.556326],
[0.187231, 0.414746, 0.556547],
[0.185556, 0.418570, 0.556753],
[0.183898, 0.422383, 0.556944],
[0.182256, 0.426184, 0.557120],
[0.180629, 0.429975, 0.557282],
[0.179019, 0.433756, 0.557430],
[0.177423, 0.437527, 0.557565],
[0.175841, 0.441290, 0.557685],
[0.174274, 0.445044, 0.557792],
[0.172719, 0.448791, 0.557885],
[0.171176, 0.452530, 0.557965],
[0.169646, 0.456262, 0.558030],
[0.168126, 0.459988, 0.558082],
[0.166617, 0.463708, 0.558119],
[0.165117, 0.467423, 0.558141],
[0.163625, 0.471133, 0.558148],
[0.162142, 0.474838, 0.558140],
[0.160665, 0.478540, 0.558115],
[0.159194, 0.482237, 0.558073],
[0.157729, 0.485932, 0.558013],
[0.156270, 0.489624, 0.557936],
[0.154815, 0.493313, 0.557840],
[0.153364, 0.497000, 0.557724],
[0.151918, 0.500685, 0.557587],
[0.150476, 0.504369, 0.557430],
[0.149039, 0.508051, 0.557250],
[0.147607, 0.511733, 0.557049],
[0.146180, 0.515413, 0.556823],
[0.144759, 0.519093, 0.556572],
[0.143343, 0.522773, 0.556295],
[0.141935, 0.526453, 0.555991],
[0.140536, 0.530132, 0.555659],
[0.139147, 0.533812, 0.555298],
[0.137770, 0.537492, 0.554906],
[0.136408, 0.541173, 0.554483],
[0.135066, 0.544853, 0.554029],
[0.133743, 0.548535, 0.553541],
[0.132444, 0.552216, 0.553018],
[0.131172, 0.555899, 0.552459],
[0.129933, 0.559582, 0.551864],
[0.128729, 0.563265, 0.551229],
[0.127568, 0.566949, 0.550556],
[0.126453, 0.570633, 0.549841],
[0.125394, 0.574318, 0.549086],
[0.124395, 0.578002, 0.548287],
[0.123463, 0.581687, 0.547445],
[0.122606, 0.585371, 0.546557],
[0.121831, 0.589055, 0.545623],
[0.121148, 0.592739, 0.544641],
[0.120565, 0.596422, 0.543611],
[0.120092, 0.600104, 0.542530],
[0.119738, 0.603785, 0.541400],
[0.119512, 0.607464, 0.540218],
[0.119423, 0.611141, 0.538982],
[0.119483, 0.614817, 0.537692],
[0.119699, 0.618490, 0.536347],
[0.120081, 0.622161, 0.534946],
[0.120638, 0.625828, 0.533488],
[0.121380, 0.629492, 0.531973],
[0.122312, 0.633153, 0.530398],
[0.123444, 0.636809, 0.528763],
[0.124780, 0.640461, 0.527068],
[0.126326, 0.644107, 0.525311],
[0.128087, 0.647749, 0.523491],
[0.130067, 0.651384, 0.521608],
[0.132268, 0.655014, 0.519661],
[0.134692, 0.658636, 0.517649],
[0.137339, 0.662252, 0.515571],
[0.140210, 0.665859, 0.513427],
[0.143303, 0.669459, 0.511215],
[0.146616, 0.673050, 0.508936],
[0.150148, 0.676631, 0.506589],
[0.153894, 0.680203, 0.504172],
[0.157851, 0.683765, 0.501686],
[0.162016, 0.687316, 0.499129],
[0.166383, 0.690856, 0.496502],
[0.170948, 0.694384, 0.493803],
[0.175707, 0.697900, 0.491033],
[0.180653, 0.701402, 0.488189],
[0.185783, 0.704891, 0.485273],
[0.191090, 0.708366, 0.482284],
[0.196571, 0.711827, 0.479221],
[0.202219, 0.715272, 0.476084],
[0.208030, 0.718701, 0.472873],
[0.214000, 0.722114, 0.469588],
[0.220124, 0.725509, 0.466226],
[0.226397, 0.728888, 0.462789],
[0.232815, 0.732247, 0.459277],
[0.239374, 0.735588, 0.455688],
[0.246070, 0.738910, 0.452024],
[0.252899, 0.742211, 0.448284],
[0.259857, 0.745492, 0.444467],
[0.266941, 0.748751, 0.440573],
[0.274149, 0.751988, 0.436601],
[0.281477, 0.755203, 0.432552],
[0.288921, 0.758394, 0.428426],
[0.296479, 0.761561, 0.424223],
[0.304148, 0.764704, 0.419943],
[0.311925, 0.767822, 0.415586],
[0.319809, 0.770914, 0.411152],
[0.327796, 0.773980, 0.406640],
[0.335885, 0.777018, 0.402049],
[0.344074, 0.780029, 0.397381],
[0.352360, 0.783011, 0.392636],
[0.360741, 0.785964, 0.387814],
[0.369214, 0.788888, 0.382914],
[0.377779, 0.791781, 0.377939],
[0.386433, 0.794644, 0.372886],
[0.395174, 0.797475, 0.367757],
[0.404001, 0.800275, 0.362552],
[0.412913, 0.803041, 0.357269],
[0.421908, 0.805774, 0.351910],
[0.430983, 0.808473, 0.346476],
[0.440137, 0.811138, 0.340967],
[0.449368, 0.813768, 0.335384],
[0.458674, 0.816363, 0.329727],
[0.468053, 0.818921, 0.323998],
[0.477504, 0.821444, 0.318195],
[0.487026, 0.823929, 0.312321],
[0.496615, 0.826376, 0.306377],
[0.506271, 0.828786, 0.300362],
[0.515992, 0.831158, 0.294279],
[0.525776, 0.833491, 0.288127],
[0.535621, 0.835785, 0.281908],
[0.545524, 0.838039, 0.275626],
[0.555484, 0.840254, 0.269281],
[0.565498, 0.842430, 0.262877],
[0.575563, 0.844566, 0.256415],
[0.585678, 0.846661, 0.249897],
[0.595839, 0.848717, 0.243329],
[0.606045, 0.850733, 0.236712],
[0.616293, 0.852709, 0.230052],
[0.626579, 0.854645, 0.223353],
[0.636902, 0.856542, 0.216620],
[0.647257, 0.858400, 0.209861],
[0.657642, 0.860219, 0.203082],
[0.668054, 0.861999, 0.196293],
[0.678489, 0.863742, 0.189503],
[0.688944, 0.865448, 0.182725],
[0.699415, 0.867117, 0.175971],
[0.709898, 0.868751, 0.169257],
[0.720391, 0.870350, 0.162603],
[0.730889, 0.871916, 0.156029],
[0.741388, 0.873449, 0.149561],
[0.751884, 0.874951, 0.143228],
[0.762373, 0.876424, 0.137064],
[0.772852, 0.877868, 0.131109],
[0.783315, 0.879285, 0.125405],
[0.793760, 0.880678, 0.120005],
[0.804182, 0.882046, 0.114965],
[0.814576, 0.883393, 0.110347],
[0.824940, 0.884720, 0.106217],
[0.835270, 0.886029, 0.102646],
[0.845561, 0.887322, 0.099702],
[0.855810, 0.888601, 0.097452],
[0.866013, 0.889868, 0.095953],
[0.876168, 0.891125, 0.095250],
[0.886271, 0.892374, 0.095374],
[0.896320, 0.893616, 0.096335],
[0.906311, 0.894855, 0.098125],
[0.916242, 0.896091, 0.100717],
[0.926106, 0.897330, 0.104071],
[0.935904, 0.898570, 0.108131],
[0.945636, 0.899815, 0.112838],
[0.955300, 0.901065, 0.118128],
[0.964894, 0.902323, 0.123941],
[0.974417, 0.903590, 0.130215],
[0.983868, 0.904867, 0.136897],
[0.993248, 0.906157, 0.143936]]
from matplotlib.colors import ListedColormap
cmaps = {}
for (name, data) in (('magma', _magma_data),
('inferno', _inferno_data),
('plasma', _plasma_data),
('viridis', _viridis_data)):
cmaps[name] = ListedColormap(data, name=name)
magma = cmaps['magma']
inferno = cmaps['inferno']
plasma = cmaps['plasma']
viridis = cmaps['viridis']
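# Illustrative usage (added; assumes numpy and matplotlib are installed and
# that this module is importable as `colormaps`):
#
#   import numpy as np
#   import matplotlib.pyplot as plt
#   from colormaps import viridis
#
#   data = np.random.rand(32, 32)
#   plt.imshow(data, cmap=viridis)
#   plt.colorbar()
#   plt.show()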
| mit |
grow/grow | grow/translators/base.py | 1 | 15038 | """Base translators for translating Grow content."""
import copy
import json
import logging
import os
import threading
import progressbar
import texttable
import yaml
from protorpc import message_types
from protorpc import messages
from protorpc import protojson
from grow.common import progressbar_non
from grow.common import utils
from grow.translators import errors as translator_errors
class TranslatorStat(messages.Message):
lang = messages.StringField(1)
num_words = messages.IntegerField(2)
num_words_translated = messages.IntegerField(3)
source_lang = messages.StringField(4)
ident = messages.StringField(5)
url = messages.StringField(6)
uploaded = message_types.DateTimeField(7)
service = messages.StringField(8)
downloaded = message_types.DateTimeField(9)
def translator_stat_representer(dumper, stat):
content = json.loads(protojson.encode_message(stat))
content.pop('lang') # Exclude from serialization.
return dumper.represent_mapping('tag:yaml.org,2002:map', content)
yaml.SafeDumper.add_representer(TranslatorStat,
translator_stat_representer)
class TranslatorServiceError(Exception):
def __init__(self, message, ident=None, locale=None):
if locale:
new_message = 'Error for locale "{}" -> {}'.format(locale, message)
elif ident:
new_message = 'Error for resource "{}" -> {}'.format(
ident, message)
else:
new_message = message
self.message = new_message
super(TranslatorServiceError, self).__init__(new_message)
class Translator(object):
TRANSLATOR_STATS_PATH = '/translators.yaml'
KIND = None
has_immutable_translation_resources = False
has_multiple_langs_in_one_resource = False
def __init__(self, pod, config=None, project_title=None, instructions=None):
self.pod = pod
self.config = config or {}
self.project_title = project_title or 'Untitled Grow Website'
self.instructions = instructions
def _cleanup_locales(self, locales):
"""Certain locales should be ignored."""
clean_locales = []
default_locale = self.pod.podspec.default_locale
skipped = {
'symlink': set(),
'po': set(),
}
for locale in locales:
locale_path = os.path.join('translations', str(locale))
# Silently ignore the default locale.
if default_locale and str(locale) == str(default_locale):
continue
# Ignore the symlinks.
if os.path.islink(locale_path):
skipped['symlink'].add(str(locale))
continue
# Ignore the locales without a `.PO` file.
po_path = os.path.join(locale_path, 'LC_MESSAGES', 'messages.po')
if not self.pod.file_exists(po_path):
skipped['po'].add(str(locale))
continue
clean_locales.append(locale)
# Summary of skipped files.
if skipped['symlink']:
self.pod.logger.info('Skipping: {} (symlinked)'.format(
', '.join(sorted(skipped['symlink']))))
if skipped['po']:
self.pod.logger.info('Skipping: {} (no `.po` file)'.format(
', '.join(sorted(skipped['po']))))
return clean_locales
def _download_content(self, stat):
raise NotImplementedError
def _log_catalog_changes(self, unchanged_locales, changed_locales):
if unchanged_locales:
self.pod.logger.info('No translations updated: {}'.format(
', '.join(sorted(unchanged_locales))))
if changed_locales:
for locale, value in changed_locales.items():
self.pod.logger.info('Updated {} of {} translations: {}'.format(
value['imported'], value['total'], locale))
def _upload_catalog(self, catalog, source_lang, prune):
raise NotImplementedError
def _upload_catalogs(self, catalogs, source_lang, prune=False):
raise NotImplementedError
def _update_acl(self, stat, locale):
raise NotImplementedError
def _update_acls(self, stat, locales):
raise NotImplementedError
def _update_meta(self, stat, locale, catalog):
raise NotImplementedError
def needs_meta_update(self):
"""Allow to be flagged for additional meta update after uploading."""
return False
def _get_stats_to_download(self, locales):
# 'stats' maps the service name to a mapping of languages to stats.
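        # Illustrative layout of /translators.yaml (service name hypothetical):
        #   my_service:
        #     de: {ident: '...', url: '...', uploaded: '...'}
        #     fr: {ident: '...', url: '...', uploaded: '...'}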
if not self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH):
return {}
stats = self.pod.read_yaml(Translator.TRANSLATOR_STATS_PATH)
if self.KIND not in stats:
self.pod.logger.info(
'Nothing found to download from {}'.format(self.KIND))
return {}
stats_to_download = stats[self.KIND]
if locales:
stats_to_download = dict([(lang, stat)
for (lang, stat) in stats_to_download.items()
if lang in locales])
for lang, stat in stats_to_download.items():
if isinstance(stat, TranslatorStat):
stat = json.loads(protojson.encode_message(stat))
stat['lang'] = lang
stat_message = protojson.decode_message(TranslatorStat, json.dumps(stat))
stats_to_download[lang] = stat_message
return stats_to_download
def download(self, locales, save_stats=True, include_obsolete=False):
# TODO: Rename to `download_and_import`.
if not self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH):
text = 'File {} not found. Nothing to download.'
self.pod.logger.info(text.format(Translator.TRANSLATOR_STATS_PATH))
return
stats_to_download = self._get_stats_to_download(locales)
if not stats_to_download:
return
num_files = len(stats_to_download)
text = 'Downloading translations: %(value)d/{} (in %(time_elapsed).9s)'
widgets = [progressbar.FormatLabel(text.format(num_files))]
bar = progressbar_non.create_progressbar(
"Downloading translations...", widgets=widgets, max_value=num_files)
bar.start()
threads = []
langs_to_translations = {}
new_stats = []
def _do_download(lang, stat):
try:
new_stat, content = self._download_content(stat)
except translator_errors.NotFoundError:
text = 'No translations to download for: {}'
self.pod.logger.info(text.format(lang))
return
new_stat.uploaded = stat.uploaded # Preserve uploaded field.
langs_to_translations[lang] = content
new_stats.append(new_stat)
for i, (lang, stat) in enumerate(stats_to_download.items()):
thread = utils.ProgressBarThread(
bar, True, target=_do_download, args=(lang, stat))
threads.append(thread)
thread.start()
# Perform the first operation synchronously to avoid oauth2 refresh
# locking issues.
if i == 0:
thread.join()
for i, thread in enumerate(threads):
if i > 0:
thread.join()
bar.finish()
has_changed_content = False
unchanged_locales = []
changed_locales = {}
for lang, translations in langs_to_translations.items():
            catalog_changed, imported_translations, total_translations = self.pod.catalogs.import_translations(
                locale=lang, content=translations,
                include_obsolete=include_obsolete)
            if imported_translations == 0:
                unchanged_locales.append(lang)
            else:
                changed_locales[lang] = {
                    'imported': imported_translations,
                    'total': total_translations,
                }
            # Accumulate across languages rather than overwriting, so a single
            # changed catalog is enough to trigger saving the stats below.
            has_changed_content = has_changed_content or catalog_changed
if save_stats and has_changed_content:
self.save_stats(new_stats)
self._log_catalog_changes(unchanged_locales, changed_locales)
return new_stats
def update_acl(self, locales=None):
locales = locales or self.pod.catalogs.list_locales()
locales = self._cleanup_locales(locales)
if not locales:
            self.pod.logger.info('No locales found to update.')
return
stats_to_download = self._get_stats_to_download(locales)
if not stats_to_download:
self.pod.logger.info('No documents found to update.')
return
if self.has_multiple_langs_in_one_resource:
self._update_acls(stats_to_download, locales)
stat = list(stats_to_download.values())[0]
self.pod.logger.info('ACL updated -> {}'.format(stat.ident))
return
threads = []
for i, (locale, stat) in enumerate(stats_to_download.items()):
thread = threading.Thread(
target=self._update_acl, args=(stat, locale))
threads.append(thread)
thread.start()
if i == 0:
thread.join()
self.pod.logger.info(
'ACL updated ({}): {}'.format(stat.lang, stat.url))
for i, thread in enumerate(threads):
if i > 0:
thread.join()
def update_meta(self, locales=None):
locales = locales or self.pod.catalogs.list_locales()
locales = self._cleanup_locales(locales)
if not locales:
            self.pod.logger.info('No locales found to update.')
return
stats_to_download = self._get_stats_to_download(locales)
if not stats_to_download:
self.pod.logger.info('No documents found to update.')
return
threads = []
for i, (locale, stat) in enumerate(stats_to_download.items()):
catalog_for_meta = self.pod.catalogs.get(locale)
thread = threading.Thread(
target=self._update_meta, args=(stat, locale, catalog_for_meta))
threads.append(thread)
thread.start()
if i == 0:
thread.join()
self.pod.logger.info('Meta information updated ({}): {}'.format(
stat.lang, stat.url))
for i, thread in enumerate(threads):
if i > 0:
thread.join()
def upload(self, locales=None, force=True, verbose=False, save_stats=True,
prune=False):
source_lang = self.pod.podspec.default_locale
locales = locales or self.pod.catalogs.list_locales()
locales = self._cleanup_locales(locales)
stats = []
num_files = len(locales)
if not locales:
self.pod.logger.info('No locales to upload.')
return
if not force:
if (self.has_immutable_translation_resources
and self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH)):
text = 'Found existing translator data in: {}'
self.pod.logger.info(text.format(
Translator.TRANSLATOR_STATS_PATH))
text = 'This will be updated with new data after the upload is complete.'
self.pod.logger.info(text)
text = 'Proceed to upload {} translation catalogs?'
text = text.format(num_files)
if not utils.interactive_confirm(text):
self.pod.logger.info('Aborted.')
return
if self.has_multiple_langs_in_one_resource:
catalogs_to_upload = []
for locale in locales:
catalog_to_upload = self.pod.catalogs.get(locale)
if catalog_to_upload:
catalogs_to_upload.append(catalog_to_upload)
stats = self._upload_catalogs(catalogs_to_upload, source_lang,
prune=prune)
else:
text = 'Uploading translations: %(value)d/{} (in %(time_elapsed).9s)'
widgets = [progressbar.FormatLabel(text.format(num_files))]
bar = progressbar_non.create_progressbar(
"Uploading translations...", widgets=widgets, max_value=num_files)
bar.start()
threads = []
def _do_upload(locale):
catalog = self.pod.catalogs.get(locale)
stat = self._upload_catalog(catalog, source_lang, prune=prune)
stats.append(stat)
for i, locale in enumerate(locales):
thread = utils.ProgressBarThread(
bar, True, target=_do_upload, args=(locale,))
threads.append(thread)
thread.start()
# Perform the first operation synchronously to avoid oauth2 refresh
# locking issues.
if i == 0:
thread.join()
for i, thread in enumerate(threads):
if i > 0:
thread.join()
bar.finish()
stats = sorted(stats, key=lambda stat: stat.lang)
if verbose:
self.pretty_print_stats(stats)
if save_stats:
self.save_stats(stats)
return stats
def save_stats(self, stats):
"""Merges a list of stats into the translator stats file."""
if self.pod.file_exists(Translator.TRANSLATOR_STATS_PATH):
content = self.pod.read_yaml(Translator.TRANSLATOR_STATS_PATH)
create = False
else:
content = {}
create = True
if self.KIND not in content:
content[self.KIND] = {}
for stat in copy.deepcopy(stats):
content[self.KIND][stat.lang] = stat
yaml_content = yaml.safe_dump(content, default_flow_style=False)
self.pod.write_file(Translator.TRANSLATOR_STATS_PATH, yaml_content)
if create:
self.pod.logger.info('Saved: {}'.format(
Translator.TRANSLATOR_STATS_PATH))
else:
self.pod.logger.info('Updated: {}'.format(
Translator.TRANSLATOR_STATS_PATH))
@classmethod
def pretty_print_stats(cls, stats):
table = texttable.Texttable(max_width=0)
table.set_deco(texttable.Texttable.HEADER)
rows = []
rows.append(['Language', 'URL', 'Wordcount'])
for stat in stats:
rows.append([stat.lang, stat.url, stat.num_words or '--'])
table.add_rows(rows)
logging.info('\n' + table.draw() + '\n')
def get_edit_url(self, doc):
if not doc.locale:
return
stats = self._get_stats_to_download([doc.locale])
if doc.locale not in stats:
return
stat = stats[doc.locale]
return stat.url
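# Minimal subclass sketch (added for illustration; not part of Grow itself).
# A concrete translator mainly implements the per-catalog hooks used above;
# the 'echo' service below and its behavior are hypothetical.
#
#   class EchoTranslator(Translator):
#       KIND = 'echo'
#
#       def _upload_catalog(self, catalog, source_lang, prune):
#           return TranslatorStat(lang=str(catalog.locale), service=self.KIND)
#
#       def _download_content(self, stat):
#           path = '/translations/{}/LC_MESSAGES/messages.po'.format(stat.lang)
#           return stat, self.pod.read_file(path)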
| mit |
spottradingllc/zoom | test/entities/test_application_state.py | 1 | 1899 | from unittest import TestCase
from zoom.www.entities.application_state import ApplicationState
class TestApplicationState(TestCase):
def setUp(self):
self.state = ApplicationState(application_name="1",
configuration_path="2",
application_status="3",
application_host=None,
last_update=1388556000,
start_stop_time="6",
error_state="7",
delete="8",
local_mode="9",
login_user="10",
last_command="12",
pd_disabled=False,
grayed=True,
read_only=True,
load_times=1,
restart_count=0,
platform=0)
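    # Note (added): last_update is passed as a Unix timestamp in seconds;
    # to_dictionary() is expected to render it as the human-readable local
    # time string asserted below.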
def test_to_dictionary(self):
        self.assertEqual(
{
'application_name': "1",
'configuration_path': "2",
'application_status': "unknown",
'application_host': "",
'last_update': '2014-01-01 00:00:00',
'start_stop_time': "6",
'error_state': "7",
'delete': "8",
'local_mode': "9",
'login_user': "10",
'last_command': "12",
'pd_disabled': False,
'grayed': True,
'read_only': True,
'load_times': 1,
'restart_count': 0,
'platform': 0
},
self.state.to_dictionary()
)
| gpl-2.0 |
xliux/chess | third_party/gtest-1.7.0/test/gtest_shuffle_test.py | 3023 | 12549 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
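# Example (added for illustration): given program output such as
#   ----
#   Foo.Bar
#   Foo.Baz
# GetTestsForAllIterations returns [['Foo.Bar', 'Foo.Baz']].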
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of test cases from 'tests', in their original order.
Consecutive duplicates are removed.
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
    if test_case not in test_cases:
test_cases.append(test_case)
return test_cases
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
| apache-2.0 |
Andrew-McNab-UK/DIRAC | RequestManagementSystem/Service/ReqManagerHandler.py | 2 | 12793 | #####################################################################
# File: ReqManagerHandler.py
########################################################################
"""
:mod: ReqManagerHandler
.. module: ReqManagerHandler
:synopsis: Implementation of the RequestDB service in the DISET framework
"""
__RCSID__ = "$Id$"
# # imports
import json
import datetime
import math
from types import DictType, IntType, LongType, ListType, StringTypes, BooleanType
# # from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
# # from RMS
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
from DIRAC.RequestManagementSystem.DB.RequestDB import RequestDB
class ReqManagerHandler( RequestHandler ):
"""
.. class:: ReqManagerHandler
RequestDB interface in the DISET framework.
"""
# # request validator
__validator = None
# # request DB instance
__requestDB = None
@classmethod
def initializeHandler( cls, serviceInfoDict ):
""" initialize handler """
try:
cls.__requestDB = RequestDB()
except RuntimeError, error:
gLogger.exception( error )
return S_ERROR( error )
# If there is a constant delay to be applied to each request
cls.constantRequestDelay = getServiceOption( serviceInfoDict, 'ConstantRequestDelay', 0 )
# # create tables for empty db
return cls.__requestDB.createTables()
# # helper functions
@classmethod
def validate( cls, request ):
""" request validation """
if not cls.__validator:
cls.__validator = RequestValidator()
return cls.__validator.validate( request )
types_getRequestIDForName = [ StringTypes ]
@classmethod
def export_getRequestIDForName( cls, requestName ):
""" get requestID for given :requestName: """
    result = cls.__requestDB.getRequestIDForName( requestName )
    if not result["OK"]:
      return result
    return S_OK( result["Value"] )
types_cancelRequest = [ ( IntType, LongType ) ]
@classmethod
def export_cancelRequest( cls , requestID ):
""" Cancel a request """
return cls.__requestDB.cancelRequest( requestID )
types_putRequest = [ StringTypes ]
@classmethod
def export_putRequest( cls, requestJSON ):
""" put a new request into RequestDB
:param cls: class ref
:param str requestJSON: request serialized to JSON format
"""
requestDict = json.loads( requestJSON )
requestName = requestDict.get( "RequestID", requestDict.get( 'RequestName', "***UNKNOWN***" ) )
request = Request( requestDict )
optimized = request.optimize()
if optimized.get( "Value", False ):
gLogger.debug( "putRequest: request was optimized" )
else:
gLogger.debug( "putRequest: request unchanged", optimized.get( "Message", "Nothing could be optimized" ) )
valid = cls.validate( request )
if not valid["OK"]:
gLogger.error( "putRequest: request %s not valid: %s" % ( requestName, valid["Message"] ) )
return valid
# If NotBefore is not set or user defined, we calculate its value
now = datetime.datetime.utcnow().replace( microsecond = 0 )
extraDelay = datetime.timedelta( 0 )
if request.Status not in Request.FINAL_STATES and ( not request.NotBefore or request.NotBefore < now ) :
# We don't delay if it is the first insertion
if getattr( request, 'RequestID', 0 ):
# If it is a constant delay, just set it
if cls.constantRequestDelay:
extraDelay = datetime.timedelta( minutes = cls.constantRequestDelay )
else:
# If there is a waiting Operation with Files
op = request.getWaiting().get( 'Value' )
if op and len( op ):
attemptList = [ opFile.Attempt for opFile in op if opFile.Status == "Waiting" ]
if attemptList:
            maxWaitingAttempt = max( attemptList )
# In case it is the first attempt, extraDelay is 0
# maxWaitingAttempt can be None if the operation has no File, like the ForwardDiset
extraDelay = datetime.timedelta( minutes = 2 * math.log( maxWaitingAttempt ) if maxWaitingAttempt else 0 )
request.NotBefore = now + extraDelay
gLogger.info( "putRequest: request %s not before %s (extra delay %s)" % ( request.RequestName, request.NotBefore, extraDelay ) )
requestName = request.RequestName
gLogger.info( "putRequest: Attempting to set request '%s'" % requestName )
return cls.__requestDB.putRequest( request )
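  # A standalone sketch of the NotBefore delay policy applied above (the
  # helper name is illustrative, not part of the service interface): a
  # configured constant delay wins, otherwise the delay grows
  # logarithmically with the highest Attempt among the Waiting files.
  #
  #   import datetime, math
  #
  #   def extraDelayFor( constantDelayMinutes, maxWaitingAttempt ):
  #     if constantDelayMinutes:
  #       return datetime.timedelta( minutes = constantDelayMinutes )
  #     if maxWaitingAttempt:
  #       return datetime.timedelta( minutes = 2 * math.log( maxWaitingAttempt ) )
  #     return datetime.timedelta( 0 )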
types_getScheduledRequest = [ ( IntType, LongType ) ]
@classmethod
def export_getScheduledRequest( cls , operationID ):
""" read scheduled request given operationID """
scheduled = cls.__requestDB.getScheduledRequest( operationID )
if not scheduled["OK"]:
gLogger.error( "getScheduledRequest: %s" % scheduled["Message"] )
return scheduled
if not scheduled["Value"]:
return S_OK()
requestJSON = scheduled["Value"].toJSON()
if not requestJSON["OK"]:
gLogger.error( "getScheduledRequest: %s" % requestJSON["Message"] )
return requestJSON
types_getDBSummary = []
@classmethod
def export_getDBSummary( cls ):
""" Get the summary of requests in the Request DB """
return cls.__requestDB.getDBSummary()
types_getRequest = [ ( LongType, IntType ) ]
@classmethod
def export_getRequest( cls, requestID = 0 ):
""" Get a request of given type from the database """
getRequest = cls.__requestDB.getRequest( requestID )
if not getRequest["OK"]:
gLogger.error( "getRequest: %s" % getRequest["Message"] )
return getRequest
if getRequest["Value"]:
getRequest = getRequest["Value"]
toJSON = getRequest.toJSON()
if not toJSON["OK"]:
gLogger.error( toJSON["Message"] )
return toJSON
return S_OK()
types_getBulkRequests = [ IntType, BooleanType ]
@classmethod
def export_getBulkRequests( cls, numberOfRequest, assigned ):
""" Get a request of given type from the database
:param numberOfRequest : size of the bulk (default 10)
:return S_OK( {Failed : message, Successful : list of Request.toJSON()} )
"""
getRequests = cls.__requestDB.getBulkRequests( numberOfRequest = numberOfRequest, assigned = assigned )
if not getRequests["OK"]:
gLogger.error( "getRequests: %s" % getRequests["Message"] )
return getRequests
if getRequests["Value"]:
getRequests = getRequests["Value"]
toJSONDict = {"Successful" : {}, "Failed" : {}}
for rId in getRequests:
toJSON = getRequests[rId].toJSON()
if not toJSON["OK"]:
gLogger.error( toJSON["Message"] )
toJSONDict["Failed"][rId] = toJSON["Message"]
else:
toJSONDict["Successful"][rId] = toJSON["Value"]
return S_OK( toJSONDict )
return S_OK()
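  # Shape of a successful reply (illustrative values only):
  #
  #   S_OK( { 'Successful' : { 1234 : '<request JSON string>' },
  #           'Failed'     : { 5678 : '<serialization error message>' } } )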
types_peekRequest = [ ( LongType, IntType ) ]
@classmethod
def export_peekRequest( cls, requestID = 0 ):
""" peek request given its id """
peekRequest = cls.__requestDB.peekRequest( requestID )
if not peekRequest["OK"]:
gLogger.error( "peekRequest: %s" % peekRequest["Message"] )
return peekRequest
if peekRequest["Value"]:
peekRequest = peekRequest["Value"].toJSON()
if not peekRequest["OK"]:
gLogger.error( peekRequest["Message"] )
return peekRequest
types_getRequestSummaryWeb = [ DictType, ListType, IntType, IntType ]
@classmethod
def export_getRequestSummaryWeb( cls, selectDict, sortList, startItem, maxItems ):
""" Returns a list of Request for the web portal
:param dict selectDict: parameter on which to restrain the query {key : Value}
key can be any of the Request columns, 'Type' (interpreted as Operation.Type)
and 'FromData' and 'ToData' are matched against the LastUpdate field
:param sortList: [sorting column, ASC/DESC]
:type sortList: python:list
:param int startItem: start item (for pagination)
:param int maxItems: max items (for pagination)
"""
return cls.__requestDB.getRequestSummaryWeb( selectDict, sortList, startItem, maxItems )
types_getDistinctValuesWeb = [ StringTypes ]
@classmethod
def export_getDistinctValuesWeb( cls, attribute ):
""" Get distinct values for a given request attribute. 'Type' is interpreted as
the operation type """
tableName = 'Request'
if attribute == 'Type':
tableName = 'Operation'
return cls.__requestDB.getDistinctValues( tableName, attribute )
types_getRequestCountersWeb = [ StringTypes, DictType ]
@classmethod
def export_getRequestCountersWeb( cls, groupingAttribute, selectDict ):
""" For the web portal.
Returns a dictionary {value : counts} for a given key.
        The key can be any field from the Request table, or "Type",
        which will be interpreted as 'Operation.Type'.
        :param groupingAttribute: attribute used for grouping
        :param selectDict: selection criteria
"""
return cls.__requestDB.getRequestCountersWeb( groupingAttribute, selectDict )
types_deleteRequest = [ ( IntType, LongType ) ]
@classmethod
def export_deleteRequest( cls, requestID ):
""" Delete the request with the supplied ID"""
return cls.__requestDB.deleteRequest( requestID )
types_getRequestIDsList = [ ListType, IntType, StringTypes ]
@classmethod
def export_getRequestIDsList( cls, statusList = None, limit = None, since = None, until = None, getJobID = False ):
""" get requests' IDs with status in :statusList: """
statusList = statusList if statusList else list( Request.FINAL_STATES )
limit = limit if limit else 100
since = since if since else ""
until = until if until else ""
reqIDsList = cls.__requestDB.getRequestIDsList( statusList, limit, since = since, until = until, getJobID = getJobID )
if not reqIDsList["OK"]:
gLogger.error( "getRequestIDsList: %s" % reqIDsList["Message"] )
return reqIDsList
types_getRequestIDsForJobs = [ ListType ]
@classmethod
def export_getRequestIDsForJobs( cls, jobIDs ):
""" Select the request IDs for supplied jobIDs """
return cls.__requestDB.getRequestIDsForJobs( jobIDs )
types_readRequestsForJobs = [ ListType ]
@classmethod
def export_readRequestsForJobs( cls, jobIDs ):
""" read requests for jobs given list of jobIDs """
requests = cls.__requestDB.readRequestsForJobs( jobIDs )
if not requests["OK"]:
gLogger.error( "readRequestsForJobs: %s" % requests["Message"] )
return requests
for jobID, request in requests["Value"]["Successful"].items():
requests["Value"]["Successful"][jobID] = request.toJSON()["Value"]
return requests
types_getDigest = [ ( IntType, LongType ) ]
@classmethod
def export_getDigest( cls, requestID ):
""" get digest for a request given its id
    :param int requestID: request's id
:return: S_OK( json_str )
"""
return cls.__requestDB.getDigest( requestID )
types_getRequestStatus = [ ( IntType, LongType ) ]
@classmethod
def export_getRequestStatus( cls, requestID ):
""" get request status given its id """
status = cls.__requestDB.getRequestStatus( requestID )
if not status["OK"]:
gLogger.error( "getRequestStatus: %s" % status["Message"] )
return status
types_getRequestFileStatus = [ [ IntType, LongType ], list( StringTypes ) + [ListType] ]
@classmethod
def export_getRequestFileStatus( cls, requestID, lfnList ):
""" get request file status for a given LFNs list and requestID """
    if type( lfnList ) in StringTypes:
lfnList = [lfnList]
res = cls.__requestDB.getRequestFileStatus( requestID, lfnList )
if not res["OK"]:
gLogger.error( "getRequestFileStatus: %s" % res["Message"] )
return res
# types_getRequestName = [ ( IntType, LongType ) ]
# @classmethod
# def export_getRequestName( cls, requestID ):
# """ get request name for a given requestID """
# requestName = cls.__requestDB.getRequestName( requestID )
# if not requestName["OK"]:
# gLogger.error( "getRequestName: %s" % requestName["Message"] )
# return requestName
types_getRequestInfo = [ [ IntType, LongType ] ]
@classmethod
def export_getRequestInfo( cls, requestID ):
""" get request info for a given requestID """
requestInfo = cls.__requestDB.getRequestInfo( requestID )
if not requestInfo["OK"]:
gLogger.error( "getRequestInfo: %s" % requestInfo["Message"] )
return requestInfo
| gpl-3.0 |
felixma/nova | nova/tests/unit/virt/hyperv/test_vhdutils.py | 32 | 11536 | # Copyright 2013 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import units
from nova import test
from nova.virt.hyperv import constants
from nova.virt.hyperv import vhdutils
from nova.virt.hyperv import vmutils
class VHDUtilsBaseTestCase(test.NoDBTestCase):
"Base Class unit test classes of Hyper-V VHD Utils classes."
_FAKE_VHD_PATH = "C:\\fake_path.vhdx"
_FAKE_PARENT_PATH = "C:\\fake_parent_path.vhdx"
_FAKE_FORMAT = 3
_FAKE_TYPE = 3
_FAKE_MAX_INTERNAL_SIZE = units.Gi
_FAKE_DYNAMIC_BLK_SIZE = 2097152
_FAKE_BAD_TYPE = 5
_FAKE_JOB_PATH = 'fake_job_path'
_FAKE_RET_VAL = 0
_FAKE_VHD_INFO_XML = (
"""<INSTANCE CLASSNAME="Msvm_VirtualHardDiskSettingData">
<PROPERTY NAME="BlockSize" TYPE="uint32">
<VALUE>33554432</VALUE>
</PROPERTY>
<PROPERTY NAME="Caption" TYPE="string">
<VALUE>Virtual Hard Disk Setting Data</VALUE>
</PROPERTY>
<PROPERTY NAME="Description" TYPE="string">
<VALUE>Setting Data for a Virtual Hard Disk.</VALUE>
</PROPERTY>
<PROPERTY NAME="ElementName" TYPE="string">
<VALUE>fake_path.vhdx</VALUE>
</PROPERTY>
<PROPERTY NAME="Format" TYPE="uint16">
<VALUE>%(format)s</VALUE>
</PROPERTY>
<PROPERTY NAME="InstanceID" TYPE="string">
<VALUE>52794B89-AC06-4349-AC57-486CAAD52F69</VALUE>
</PROPERTY>
<PROPERTY NAME="LogicalSectorSize" TYPE="uint32">
<VALUE>4096</VALUE>
</PROPERTY>
<PROPERTY NAME="MaxInternalSize" TYPE="uint64">
<VALUE>%(max_internal_size)s</VALUE>
</PROPERTY>
<PROPERTY NAME="ParentPath" TYPE="string">
<VALUE>%(parent_path)s</VALUE>
</PROPERTY>
<PROPERTY NAME="Path" TYPE="string">
<VALUE>%(path)s</VALUE>
</PROPERTY>
<PROPERTY NAME="PhysicalSectorSize" TYPE="uint32">
<VALUE>4096</VALUE>
</PROPERTY>
<PROPERTY NAME="Type" TYPE="uint16">
<VALUE>%(type)s</VALUE>
</PROPERTY>
</INSTANCE>""" % {'path': _FAKE_VHD_PATH,
'parent_path': _FAKE_PARENT_PATH,
'format': _FAKE_FORMAT,
'max_internal_size': _FAKE_MAX_INTERNAL_SIZE,
'type': _FAKE_TYPE})
class VHDUtilsTestCase(VHDUtilsBaseTestCase):
"""Unit tests for the Hyper-V VHDUtils class."""
def setUp(self):
super(VHDUtilsTestCase, self).setUp()
self._vhdutils = vhdutils.VHDUtils()
self._vhdutils._conn = mock.MagicMock()
self._vhdutils._vmutils = mock.MagicMock()
self._fake_vhd_info = {
'ParentPath': self._FAKE_PARENT_PATH,
'MaxInternalSize': self._FAKE_MAX_INTERNAL_SIZE,
'Type': self._FAKE_TYPE}
def test_validate_vhd(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.ValidateVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.validate_vhd(self._FAKE_VHD_PATH)
mock_img_svc.ValidateVirtualHardDisk.assert_called_once_with(
Path=self._FAKE_VHD_PATH)
def test_get_vhd_info(self):
self._mock_get_vhd_info()
vhd_info = self._vhdutils.get_vhd_info(self._FAKE_VHD_PATH)
self.assertEqual(self._fake_vhd_info, vhd_info)
def _mock_get_vhd_info(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.GetVirtualHardDiskInfo.return_value = (
self._FAKE_VHD_INFO_XML, self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
def test_create_dynamic_vhd(self):
self._vhdutils.get_vhd_info = mock.MagicMock(
return_value={'Format': self._FAKE_FORMAT})
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.CreateDynamicVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.create_dynamic_vhd(self._FAKE_VHD_PATH,
self._FAKE_MAX_INTERNAL_SIZE,
constants.DISK_FORMAT_VHD)
mock_img_svc.CreateDynamicVirtualHardDisk.assert_called_once_with(
Path=self._FAKE_VHD_PATH,
MaxInternalSize=self._FAKE_MAX_INTERNAL_SIZE)
self._vhdutils._vmutils.check_ret_val.assert_called_once_with(
self._FAKE_RET_VAL, self._FAKE_JOB_PATH)
def test_reconnect_parent_vhd(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.ReconnectParentVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.reconnect_parent_vhd(self._FAKE_VHD_PATH,
self._FAKE_PARENT_PATH)
mock_img_svc.ReconnectParentVirtualHardDisk.assert_called_once_with(
ChildPath=self._FAKE_VHD_PATH,
ParentPath=self._FAKE_PARENT_PATH,
Force=True)
self._vhdutils._vmutils.check_ret_val.assert_called_once_with(
self._FAKE_RET_VAL, self._FAKE_JOB_PATH)
def test_merge_vhd(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.MergeVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.merge_vhd(self._FAKE_VHD_PATH, self._FAKE_VHD_PATH)
mock_img_svc.MergeVirtualHardDisk.assert_called_once_with(
SourcePath=self._FAKE_VHD_PATH,
DestinationPath=self._FAKE_VHD_PATH)
self._vhdutils._vmutils.check_ret_val.assert_called_once_with(
self._FAKE_RET_VAL, self._FAKE_JOB_PATH)
def test_resize_vhd(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.ExpandVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.get_internal_vhd_size_by_file_size = mock.MagicMock(
return_value=self._FAKE_MAX_INTERNAL_SIZE)
self._vhdutils.resize_vhd(self._FAKE_VHD_PATH,
self._FAKE_MAX_INTERNAL_SIZE)
mock_img_svc.ExpandVirtualHardDisk.assert_called_once_with(
Path=self._FAKE_VHD_PATH,
MaxInternalSize=self._FAKE_MAX_INTERNAL_SIZE)
self._vhdutils._vmutils.check_ret_val.assert_called_once_with(
self._FAKE_RET_VAL, self._FAKE_JOB_PATH)
def _mocked_get_internal_vhd_size(self, root_vhd_size, vhd_type):
mock_get_vhd_info = mock.MagicMock(return_value={'Type': vhd_type})
mock_get_blk_size = mock.MagicMock(
return_value=self._FAKE_DYNAMIC_BLK_SIZE)
with mock.patch.multiple(self._vhdutils,
get_vhd_info=mock_get_vhd_info,
_get_vhd_dynamic_blk_size=mock_get_blk_size):
return self._vhdutils.get_internal_vhd_size_by_file_size(
None, root_vhd_size)
def test_create_differencing_vhd(self):
mock_img_svc = self._vhdutils._conn.Msvm_ImageManagementService()[0]
mock_img_svc.CreateDifferencingVirtualHardDisk.return_value = (
self._FAKE_JOB_PATH, self._FAKE_RET_VAL)
self._vhdutils.create_differencing_vhd(self._FAKE_VHD_PATH,
self._FAKE_PARENT_PATH)
mock_img_svc.CreateDifferencingVirtualHardDisk.assert_called_once_with(
Path=self._FAKE_VHD_PATH,
ParentPath=self._FAKE_PARENT_PATH)
def test_get_internal_vhd_size_by_file_size_fixed(self):
root_vhd_size = 1 * 1024 ** 3
real_size = self._mocked_get_internal_vhd_size(
root_vhd_size, constants.VHD_TYPE_FIXED)
expected_vhd_size = 1 * 1024 ** 3 - 512
self.assertEqual(expected_vhd_size, real_size)
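    # The fixed-disk expectation above reflects the single 512-byte VHD
    # footer (illustrative arithmetic): usable size = file size - 512.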
def test_get_internal_vhd_size_by_file_size_dynamic(self):
root_vhd_size = 20 * 1024 ** 3
real_size = self._mocked_get_internal_vhd_size(
root_vhd_size, constants.VHD_TYPE_DYNAMIC)
expected_vhd_size = 20 * 1024 ** 3 - 43008
self.assertEqual(expected_vhd_size, real_size)
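    # Where 43008 comes from (a sketch of the arithmetic, assuming the
    # standard VHD dynamic-disk layout rather than anything computed here):
    # a 20 GiB file split into 2 MiB blocks needs one 4-byte Block
    # Allocation Table entry per block, plus the footer copy, the dynamic
    # header and the trailing footer.
    #
    #   >>> footer_copy, dyn_header, footer = 512, 1024, 512
    #   >>> bat = (20 * 1024 ** 3 // 2097152) * 4   # 10240 blocks * 4 bytes
    #   >>> footer_copy + dyn_header + bat + footer
    #   43008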
def test_get_internal_vhd_size_by_file_size_differencing(self):
# For differencing images, the internal size of the parent vhd
# is returned
vhdutil = vhdutils.VHDUtils()
root_vhd_size = 20 * 1024 ** 3
vhdutil.get_vhd_info = mock.MagicMock()
vhdutil.get_vhd_parent_path = mock.MagicMock()
vhdutil.get_vhd_parent_path.return_value = self._FAKE_VHD_PATH
vhdutil.get_vhd_info.side_effect = [
{'Type': 4}, {'Type': constants.VHD_TYPE_DYNAMIC}]
vhdutil._get_vhd_dynamic_blk_size = mock.MagicMock()
vhdutil._get_vhd_dynamic_blk_size.return_value = 2097152
real_size = vhdutil.get_internal_vhd_size_by_file_size(None,
root_vhd_size)
expected_vhd_size = 20 * 1024 ** 3 - 43008
self.assertEqual(expected_vhd_size, real_size)
def test_get_vhd_format_vhdx(self):
with mock.patch('nova.virt.hyperv.vhdutils.open',
mock.mock_open(read_data=vhdutils.VHDX_SIGNATURE),
create=True):
format = self._vhdutils.get_vhd_format(self._FAKE_VHD_PATH)
self.assertEqual(constants.DISK_FORMAT_VHDX, format)
def test_get_vhd_format_vhd(self):
with mock.patch('nova.virt.hyperv.vhdutils.open',
mock.mock_open(),
create=True) as mock_open:
f = mock_open.return_value
f.tell.return_value = 1024
readdata = ['notthesig', vhdutils.VHD_SIGNATURE]
def read(*args):
for content in readdata:
yield content
f.read.side_effect = read()
format = self._vhdutils.get_vhd_format(self._FAKE_VHD_PATH)
self.assertEqual(constants.DISK_FORMAT_VHD, format)
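    # Sketch of the detection strategy these tests exercise (paraphrased,
    # not the implementation under test): VHDX images start with the ASCII
    # signature 'vhdxfile' at offset 0, while VHD images end with a
    # 512-byte footer whose cookie is 'conectix', so the check reads the
    # head first and falls back to the tail.
    #
    #   def detect_format(path):               # illustrative only
    #       with open(path, 'rb') as f:
    #           if f.read(8) == 'vhdxfile':
    #               return 'vhdx'
    #           f.seek(-512, 2)                # footer is the last sector
    #           if f.read(8) == 'conectix':
    #               return 'vhd'
    #           raise ValueError('unrecognized image signature')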
def test_get_vhd_format_invalid_format(self):
with mock.patch('nova.virt.hyperv.vhdutils.open',
mock.mock_open(read_data='invalid'),
create=True) as mock_open:
f = mock_open.return_value
f.tell.return_value = 1024
self.assertRaises(vmutils.HyperVException,
self._vhdutils.get_vhd_format,
self._FAKE_VHD_PATH)
def test_get_vhd_format_zero_length_file(self):
with mock.patch('nova.virt.hyperv.vhdutils.open',
mock.mock_open(read_data=''),
create=True) as mock_open:
f = mock_open.return_value
f.tell.return_value = 0
self.assertRaises(vmutils.HyperVException,
self._vhdutils.get_vhd_format,
self._FAKE_VHD_PATH)
f.seek.assert_called_once_with(0, 2)
def test_get_supported_vhd_format(self):
fmt = self._vhdutils.get_best_supported_vhd_format()
self.assertEqual(constants.DISK_FORMAT_VHD, fmt)
| apache-2.0 |
mihailignatenko/erp | addons/mail/tests/test_mail_gateway.py | 62 | 43064 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.mail.tests.common import TestMail
from openerp.tools import mute_logger
import socket
MAIL_TEMPLATE = """Return-Path: <[email protected]>
To: {to}
Received: by mail1.openerp.com (Postfix, from userid 10002)
id 5DF9ABFB2A; Fri, 10 Aug 2012 16:16:39 +0200 (CEST)
From: {email_from}
Subject: {subject}
MIME-Version: 1.0
Content-Type: multipart/alternative;
boundary="----=_Part_4200734_24778174.1344608186754"
Date: Fri, 10 Aug 2012 14:16:26 +0000
Message-ID: {msg_id}
{extra}
------=_Part_4200734_24778174.1344608186754
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: quoted-printable
Please call me as soon as possible this afternoon!
--
Sylvie
------=_Part_4200734_24778174.1344608186754
Content-Type: text/html; charset=utf-8
Content-Transfer-Encoding: quoted-printable
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>=20
<meta http-equiv=3D"Content-Type" content=3D"text/html; charset=3Dutf-8" />
</head>=20
<body style=3D"margin: 0; padding: 0; background: #ffffff;-webkit-text-size-adjust: 100%;">=20
<p>Please call me as soon as possible this afternoon!</p>
<p>--<br/>
Sylvie
<p>
</body>
</html>
------=_Part_4200734_24778174.1344608186754--
"""
MAIL_TEMPLATE_PLAINTEXT = """Return-Path: <[email protected]>
To: {to}
Received: by mail1.openerp.com (Postfix, from userid 10002)
id 5DF9ABFB2A; Fri, 10 Aug 2012 16:16:39 +0200 (CEST)
From: Sylvie Lelitre <[email protected]>
Subject: {subject}
MIME-Version: 1.0
Content-Type: text/plain
Date: Fri, 10 Aug 2012 14:16:26 +0000
Message-ID: {msg_id}
{extra}
Please call me as soon as possible this afternoon!
--
Sylvie
"""
MAIL_MULTIPART_MIXED = """Return-Path: <[email protected]>
X-Original-To: [email protected]
Delivered-To: [email protected]
Received: by mail1.grosbedon.com (Postfix, from userid 10002)
id E8166BFACA; Fri, 23 Aug 2013 13:18:01 +0200 (CEST)
X-Spam-Checker-Version: SpamAssassin 3.3.1 (2010-03-16) on mail1.grosbedon.com
X-Spam-Level:
X-Spam-Status: No, score=-2.6 required=5.0 tests=BAYES_00,FREEMAIL_FROM,
HTML_MESSAGE,RCVD_IN_DNSWL_LOW autolearn=unavailable version=3.3.1
Received: from mail-ie0-f173.google.com (mail-ie0-f173.google.com [209.85.223.173])
by mail1.grosbedon.com (Postfix) with ESMTPS id 9BBD7BFAAA
for <[email protected]>; Fri, 23 Aug 2013 13:17:55 +0200 (CEST)
Received: by mail-ie0-f173.google.com with SMTP id qd12so575130ieb.4
for <[email protected]>; Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
d=gmail.com; s=20120113;
h=mime-version:date:message-id:subject:from:to:content-type;
bh=dMNHV52EC7GAa7+9a9tqwT9joy9z+1950J/3A6/M/hU=;
b=DGuv0VjegdSrEe36ADC8XZ9Inrb3Iu+3/52Bm+caltddXFH9yewTr0JkCRQaJgMwG9
qXTQgP8qu/VFEbCh6scu5ZgU1hknzlNCYr3LT+Ih7dAZVUEHUJdwjzUU1LFV95G2RaCd
/Lwff6CibuUvrA+0CBO7IRKW0Sn5j0mukYu8dbaKsm6ou6HqS8Nuj85fcXJfHSHp6Y9u
dmE8jBh3fHCHF/nAvU+8aBNSIzl1FGfiBYb2jCoapIuVFitKR4q5cuoodpkH9XqqtOdH
DG+YjEyi8L7uvdOfN16eMr7hfUkQei1yQgvGu9/5kXoHg9+Gx6VsZIycn4zoaXTV3Nhn
nu4g==
MIME-Version: 1.0
X-Received: by 10.50.124.65 with SMTP id mg1mr1144467igb.43.1377256674216;
Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
Received: by 10.43.99.71 with HTTP; Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
Date: Fri, 23 Aug 2013 13:17:54 +0200
Message-ID: <CAP76m_V4BY2F7DWHzwfjteyhW8L2LJswVshtmtVym+LUJ=rASQ@mail.gmail.com>
Subject: Test mail multipart/mixed
From: =?ISO-8859-1?Q?Raoul Grosbedon=E9e?= <[email protected]>
To: Followers of ASUSTeK-Joseph-Walters <[email protected]>
Content-Type: multipart/mixed; boundary=089e01536c4ed4d17204e49b8e96
--089e01536c4ed4d17204e49b8e96
Content-Type: multipart/alternative; boundary=089e01536c4ed4d16d04e49b8e94
--089e01536c4ed4d16d04e49b8e94
Content-Type: text/plain; charset=ISO-8859-1
Should create a multipart/mixed: from gmail, *bold*, with attachment.
--
Marcel Boitempoils.
--089e01536c4ed4d16d04e49b8e94
Content-Type: text/html; charset=ISO-8859-1
<div dir="ltr">Should create a multipart/mixed: from gmail, <b>bold</b>, with attachment.<br clear="all"><div><br></div>-- <br>Marcel Boitempoils.</div>
--089e01536c4ed4d16d04e49b8e94--
--089e01536c4ed4d17204e49b8e96
Content-Type: text/plain; charset=US-ASCII; name="test.txt"
Content-Disposition: attachment; filename="test.txt"
Content-Transfer-Encoding: base64
X-Attachment-Id: f_hkpb27k00
dGVzdAo=
--089e01536c4ed4d17204e49b8e96--"""
MAIL_MULTIPART_MIXED_TWO = """X-Original-To: [email protected]
Delivered-To: [email protected]
Received: by mail1.grosbedon.com (Postfix, from userid 10002)
id E8166BFACA; Fri, 23 Aug 2013 13:18:01 +0200 (CEST)
From: "Bruce Wayne" <[email protected]>
Content-Type: multipart/alternative;
boundary="Apple-Mail=_9331E12B-8BD2-4EC7-B53E-01F3FBEC9227"
Message-Id: <[email protected]>
Mime-Version: 1.0 (Mac OS X Mail 7.3 \(1878.6\))
--Apple-Mail=_9331E12B-8BD2-4EC7-B53E-01F3FBEC9227
Content-Transfer-Encoding: 7bit
Content-Type: text/plain;
charset=us-ascii
First and second part
--Apple-Mail=_9331E12B-8BD2-4EC7-B53E-01F3FBEC9227
Content-Type: multipart/mixed;
boundary="Apple-Mail=_CA6C687E-6AA0-411E-B0FE-F0ABB4CFED1F"
--Apple-Mail=_CA6C687E-6AA0-411E-B0FE-F0ABB4CFED1F
Content-Transfer-Encoding: 7bit
Content-Type: text/html;
charset=us-ascii
<html><head></head><body>First part</body></html>
--Apple-Mail=_CA6C687E-6AA0-411E-B0FE-F0ABB4CFED1F
Content-Disposition: inline;
filename=thetruth.pdf
Content-Type: application/pdf;
name="thetruth.pdf"
Content-Transfer-Encoding: base64
SSBhbSB0aGUgQmF0TWFuCg==
--Apple-Mail=_CA6C687E-6AA0-411E-B0FE-F0ABB4CFED1F
Content-Transfer-Encoding: 7bit
Content-Type: text/html;
charset=us-ascii
<html><head></head><body>Second part</body></html>
--Apple-Mail=_CA6C687E-6AA0-411E-B0FE-F0ABB4CFED1F--
--Apple-Mail=_9331E12B-8BD2-4EC7-B53E-01F3FBEC9227--
"""
class TestMailgateway(TestMail):
def test_00_message_parse(self):
""" Testing incoming emails parsing """
cr, uid = self.cr, self.uid
res = self.mail_thread.message_parse(cr, uid, MAIL_TEMPLATE_PLAINTEXT)
self.assertIn('Please call me as soon as possible this afternoon!', res.get('body', ''),
'message_parse: missing text in text/plain body after parsing')
res = self.mail_thread.message_parse(cr, uid, MAIL_TEMPLATE)
self.assertIn('<p>Please call me as soon as possible this afternoon!</p>', res.get('body', ''),
'message_parse: missing html in multipart/alternative body after parsing')
res = self.mail_thread.message_parse(cr, uid, MAIL_MULTIPART_MIXED)
self.assertNotIn('Should create a multipart/mixed: from gmail, *bold*, with attachment', res.get('body', ''),
'message_parse: text version should not be in body after parsing multipart/mixed')
self.assertIn('<div dir="ltr">Should create a multipart/mixed: from gmail, <b>bold</b>, with attachment.<br clear="all"><div><br></div>', res.get('body', ''),
'message_parse: html version should be in body after parsing multipart/mixed')
res = self.mail_thread.message_parse(cr, uid, MAIL_MULTIPART_MIXED_TWO)
self.assertNotIn('First and second part', res.get('body', ''),
'message_parse: text version should not be in body after parsing multipart/mixed')
self.assertIn('First part', res.get('body', ''),
'message_parse: first part of the html version should be in body after parsing multipart/mixed')
self.assertIn('Second part', res.get('body', ''),
'message_parse: second part of the html version should be in body after parsing multipart/mixed')
@mute_logger('openerp.addons.mail.mail_thread', 'openerp.models')
def test_10_message_process(self):
""" Testing incoming emails processing. """
cr, uid, user_raoul = self.cr, self.uid, self.user_raoul
def format_and_process(template, to='[email protected], [email protected]', subject='Frogs',
extra='', email_from='Sylvie Lelitre <[email protected]>',
msg_id='<[email protected]>',
model=None):
self.assertEqual(self.mail_group.search(cr, uid, [('name', '=', subject)]), [])
mail = template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
self.mail_thread.message_process(cr, uid, model, mail)
return self.mail_group.search(cr, uid, [('name', '=', subject)])
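        # Typical use throughout this test (illustrative): feeding a mail
        # addressed to the catch-all alias should create exactly one group
        # named after the subject, whose ids the helper returns.
        #
        #   >>> format_and_process(MAIL_TEMPLATE, to='[email protected]')
        #   [<id of the new 'Frogs' mail.group>]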
# --------------------------------------------------
# Data creation
# --------------------------------------------------
# groups@.. will cause the creation of new mail groups
self.mail_group_model_id = self.ir_model.search(cr, uid, [('model', '=', 'mail.group')])[0]
alias_id = self.mail_alias.create(cr, uid, {
'alias_name': 'groups',
'alias_user_id': False,
'alias_model_id': self.mail_group_model_id,
'alias_parent_model_id': self.mail_group_model_id,
'alias_parent_thread_id': self.group_pigs_id,
'alias_contact': 'everyone'})
# --------------------------------------------------
# Test1: new record creation
# --------------------------------------------------
# Do: incoming mail from an unknown partner on an alias creates a new mail_group "frogs"
self._init_mock_build_email()
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
sent_emails = self._build_email_kwargs_list
# Test: one group created by mailgateway administrator
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
res = self.mail_group.get_metadata(cr, uid, [frog_group.id])[0].get('create_uid') or [None]
self.assertEqual(res[0], uid,
'message_process: group should have been created by uid as alias_user__id is False on the alias')
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
msg = frog_group.message_ids[0]
self.assertEqual('Frogs', msg.subject,
'message_process: newly created group should have the incoming email as first message')
self.assertIn('Please call me as soon as possible this afternoon!', msg.body,
'message_process: newly created group should have the incoming email as first message')
self.assertEqual('email', msg.type,
'message_process: newly created group should have an email as first message')
self.assertEqual('Discussions', msg.subtype_id.name,
'message_process: newly created group should not have a log first message but an email')
# Test: message: unknown email address -> message has email_from, not author_id
self.assertFalse(msg.author_id,
'message_process: message on created group should not have an author_id')
self.assertIn('[email protected]', msg.email_from,
'message_process: message on created group should have an email_from')
# Test: followers: nobody
self.assertEqual(len(frog_group.message_follower_ids), 0, 'message_process: newly create group should not have any follower')
# Test: sent emails: no-one
self.assertEqual(len(sent_emails), 0,
'message_process: should create emails without any follower added')
# Data: unlink group
frog_group.unlink()
# Do: incoming email from an unknown partner on a Partners only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_contact': 'partners'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Partners alias should send a bounce email')
self.assertIn('Frogs', sent_emails[0].get('subject'),
'message_process: bounce email on Partners alias should contain the original subject')
self.assertIn('[email protected]', sent_emails[0].get('email_to'),
'message_process: bounce email on Partners alias should have original email sender as recipient')
# Do: incoming email from an unknown partner on a Followers only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_contact': 'followers'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Followers alias should send a bounce email')
self.assertIn('Frogs', sent_emails[0].get('subject'),
'message_process: bounce email on Followers alias should contain the original subject')
self.assertIn('[email protected]', sent_emails[0].get('email_to'),
'message_process: bounce email on Followers alias should have original email sender as recipient')
# Do: incoming email from a known partner on a Partners alias -> ok (+ test on alias.user_id)
self.mail_alias.write(cr, uid, [alias_id], {'alias_user_id': self.user_raoul_id, 'alias_contact': 'partners'})
p1id = self.res_partner.create(cr, uid, {'name': 'Sylvie Lelitre', 'email': '[email protected]'})
p2id = self.res_partner.create(cr, uid, {'name': 'Other Poilvache', 'email': '[email protected]'})
self._init_mock_build_email()
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
sent_emails = self._build_email_kwargs_list
# Test: one group created by Raoul
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
res = self.mail_group.get_metadata(cr, uid, [frog_group.id])[0].get('create_uid') or [None]
self.assertEqual(res[0], self.user_raoul_id,
'message_process: group should have been created by alias_user_id')
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
msg = frog_group.message_ids[0]
# Test: message: author found
self.assertEqual(p1id, msg.author_id.id,
'message_process: message on created group should have Sylvie as author_id')
self.assertIn('Sylvie Lelitre <[email protected]>', msg.email_from,
'message_process: message on created group should have have an email_from')
# Test: author (not recipient and not Raoul (as alias owner)) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id]),
'message_process: newly created group should have 1 follower (author, not creator, not recipients)')
        # Test: sent emails: no one, no bounce effect
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 0,
'message_process: should not bounce incoming emails')
# Data: unlink group
frog_group.unlink()
# Do: incoming email from a not follower Partner on a Followers only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_user_id': False, 'alias_contact': 'followers'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Partners alias should send a bounce email')
# Do: incoming email from a parent document follower on a Followers only alias -> ok
self._init_mock_build_email()
self.mail_group.message_subscribe(cr, uid, [self.group_pigs_id], [p1id])
frog_groups = format_and_process(MAIL_TEMPLATE, to='[email protected], [email protected]')
# Test: one group created by Raoul (or Sylvie maybe, if we implement it)
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
# Test: author (and not recipient) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id]),
'message_process: newly created group should have 1 follower (author, not creator, not recipients)')
        # Test: sent emails: no one, no bounce effect
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 0,
'message_process: should not bounce incoming emails')
# --------------------------------------------------
# Test2: update-like alias
# --------------------------------------------------
# Do: Pigs alias is restricted, should bounce
self._init_mock_build_email()
self.mail_group.write(cr, uid, [frog_group.id], {'alias_name': 'frogs', 'alias_contact': 'followers', 'alias_force_thread_id': frog_group.id})
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
to='[email protected]>', subject='Re: news')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Followers alias should send a bounce email')
self.assertIn('Re: news', sent_emails[0].get('subject'),
'message_process: bounce email on Followers alias should contain the original subject')
# Do: Pigs alias is restricted, should accept Followers
self._init_mock_build_email()
self.mail_group.message_subscribe(cr, uid, [frog_group.id], [p2id])
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
to='[email protected]>', subject='Re: cats')
        # Test: no group 'Re: cats' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 2, 'message_process: group should contain 2 messages after reply')
# Test: sent emails: 1 (Sylvie copy of the incoming email, but no bounce)
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: one email should have been generated')
self.assertIn('[email protected]', sent_emails[0].get('email_to')[0],
'message_process: email should be sent to Sylvie')
self.mail_group.message_unsubscribe(cr, uid, [frog_group.id], [p2id])
# --------------------------------------------------
# Test3: discussion and replies
# --------------------------------------------------
# Do: even with a wrong destination, a reply should end up in the correct thread
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
to='[email protected]>', subject='Re: news',
extra='In-Reply-To: <[email protected]>\n')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 3, 'message_process: group should contain 3 messages after reply')
# Test: author (and not recipient) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id, p2id]),
'message_process: after reply, group should have 2 followers')
# Do: incoming email with ref holding model / res_id but that does not match any message in the thread: must raise since OpenERP saas-3
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE, email_from='[email protected]',
to='[email protected]', subject='spam',
extra='In-Reply-To: <12321321-openerp-%[email protected]>' % frog_group.id,
msg_id='<[email protected]>')
# When 6.1 messages are present, compat mode is available
# Create a fake 6.1 message
tmp_msg_id = self.mail_message.create(cr, uid, {'model': 'mail.group', 'res_id': frog_group.id})
self.mail_message.write(cr, uid, [tmp_msg_id], {'message_id': False})
# Do: compat mode accepts partial-matching emails
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
to='[email protected]>', subject='spam',
extra='In-Reply-To: <12321321-openerp-%d-mail.group@%s>' % (frog_group.id, socket.gethostname()))
self.mail_message.unlink(cr, uid, [tmp_msg_id])
        # Test: no group 'spam' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 4, 'message_process: group should contain 4 messages after reply')
# 6.1 compat mode should not work if hostname does not match!
tmp_msg_id = self.mail_message.create(cr, uid, {'model': 'mail.group', 'res_id': frog_group.id})
self.mail_message.write(cr, uid, [tmp_msg_id], {'message_id': False})
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
to='[email protected]>', subject='spam',
extra='In-Reply-To: <12321321-openerp-%[email protected]>' % frog_group.id)
self.mail_message.unlink(cr, uid, [tmp_msg_id])
# Do: due to some issue, same email goes back into the mailgateway
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='[email protected]',
msg_id='<[email protected]>',
subject='Re: news', extra='In-Reply-To: <[email protected]>\n')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: no new message
self.assertEqual(len(frog_group.message_ids), 4, 'message_process: message with already existing message_id should not have been duplicated')
# Test: message_id is still unique
msg_ids = self.mail_message.search(cr, uid, [('message_id', 'ilike', '<[email protected]>')])
self.assertEqual(len(msg_ids), 1,
'message_process: message with already existing message_id should not have been duplicated')
# --------------------------------------------------
# Test4: email_from and partner finding
# --------------------------------------------------
# Data: extra partner with Raoul's email -> test the 'better author finding'
extra_partner_id = self.res_partner.create(cr, uid, {'name': 'A-Raoul', 'email': '[email protected]'})
# Do: post a new message, with a known partner -> duplicate emails -> partner
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <[email protected]>',
subject='Re: news (2)',
msg_id='<[email protected]>',
extra='In-Reply-To: <[email protected]>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is A-Raoul (only existing)
self.assertEqual(frog_group.message_ids[0].author_id.id, extra_partner_id,
'message_process: email_from -> author_id wrong')
# Do: post a new message with a non-existant email that is a substring of a partner email
format_and_process(MAIL_TEMPLATE, email_from='Not really Lombrik Lubrik <[email protected]>',
subject='Re: news (2)',
msg_id='<[email protected]>',
extra='In-Reply-To: <[email protected]>\n')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author must not be set, otherwise the system is confusing different users
self.assertFalse(frog_group.message_ids[0].author_id, 'message_process: email_from -> mismatching author_id')
# Do: post a new message, with a known partner -> duplicate emails -> user
frog_group.message_unsubscribe([extra_partner_id])
self.res_users.write(cr, uid, self.user_raoul_id, {'email': '[email protected]'})
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <[email protected]>',
to='[email protected]', subject='Re: news (3)',
msg_id='<[email protected]>',
extra='In-Reply-To: <[email protected]>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is Raoul (user), not A-Raoul
self.assertEqual(frog_group.message_ids[0].author_id.id, self.partner_raoul_id,
'message_process: email_from -> author_id wrong')
# Do: post a new message, with a known partner -> duplicate emails -> partner because is follower
frog_group.message_unsubscribe([self.partner_raoul_id])
frog_group.message_subscribe([extra_partner_id])
raoul_email = self.user_raoul.email
self.res_users.write(cr, uid, self.user_raoul_id, {'email': '[email protected]'})
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <[email protected]>',
to='[email protected]', subject='Re: news (3)',
msg_id='<[email protected]>',
extra='In-Reply-To: <[email protected]>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is Raoul (user), not A-Raoul
self.assertEqual(frog_group.message_ids[0].author_id.id, extra_partner_id,
'message_process: email_from -> author_id wrong')
self.res_users.write(cr, uid, self.user_raoul_id, {'email': raoul_email})
# --------------------------------------------------
# Test5: misc gateway features
# --------------------------------------------------
# Do: incoming email with model that does not accepts incoming emails must raise
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE,
to='[email protected]', subject='spam', extra='', model='res.country',
msg_id='<[email protected]>')
# Do: incoming email without model and without alias must raise
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE,
to='[email protected]', subject='spam', extra='',
msg_id='<[email protected]>')
# Do: incoming email with model that accepting incoming emails as fallback
frog_groups = format_and_process(MAIL_TEMPLATE,
to='[email protected]',
subject='Spammy', extra='', model='mail.group',
msg_id='<[email protected]>')
self.assertEqual(len(frog_groups), 1,
'message_process: erroneous email but with a fallback model should have created a new mail.group')
# Do: incoming email in plaintext should be stored as html
frog_groups = format_and_process(MAIL_TEMPLATE_PLAINTEXT,
to='[email protected]', subject='Frogs Return', extra='',
msg_id='<[email protected]>')
# Test: one group created with one message
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
msg = frog_group.message_ids[0]
# Test: plain text content should be wrapped and stored as html
self.assertIn('<pre>\nPlease call me as soon as possible this afternoon!\n\n--\nSylvie\n</pre>', msg.body,
'message_process: plaintext incoming email incorrectly parsed')
@mute_logger('openerp.addons.mail.mail_thread', 'openerp.models')
def test_20_thread_parent_resolution(self):
""" Testing parent/child relationships are correctly established when processing incoming mails """
cr, uid = self.cr, self.uid
def format(template, to='Pretty Pigs <[email protected]>, [email protected]', subject='Re: 1',
extra='', email_from='Sylvie Lelitre <[email protected]>',
msg_id='<[email protected]>'):
return template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
group_pigs = self.mail_group.browse(cr, uid, self.group_pigs_id)
msg1 = group_pigs.message_post(body='My Body', subject='1')
msg2 = group_pigs.message_post(body='My Body', subject='2')
msg1, msg2 = self.mail_message.browse(cr, uid, [msg1, msg2])
self.assertTrue(msg1.message_id, "message_process: new message should have a proper message_id")
# Reply to msg1, make sure the reply is properly attached using the various reply identification mechanisms
# 0. Direct alias match
reply_msg1 = format(MAIL_TEMPLATE, to='Pretty Pigs <[email protected]>',
extra='In-Reply-To: %s' % msg1.message_id,
msg_id='<[email protected]>')
self.mail_group.message_process(cr, uid, None, reply_msg1)
# 1. In-Reply-To header
reply_msg2 = format(MAIL_TEMPLATE, to='[email protected]',
extra='In-Reply-To: %s' % msg1.message_id,
msg_id='<[email protected]>')
self.mail_group.message_process(cr, uid, None, reply_msg2)
# 2. References header
reply_msg3 = format(MAIL_TEMPLATE, to='[email protected]',
extra='References: <[email protected]>\r\n\t<[email protected]> %s' % msg1.message_id,
msg_id='<[email protected]>')
self.mail_group.message_process(cr, uid, None, reply_msg3)
# 3. Subject contains [<ID>] + model passed to message_process -> only attached to group, but not to mail (not in msg1.child_ids)
reply_msg4 = format(MAIL_TEMPLATE, to='[email protected]',
extra='', subject='Re: [%s] 1' % self.group_pigs_id,
msg_id='<[email protected]>')
self.mail_group.message_process(cr, uid, 'mail.group', reply_msg4)
group_pigs.refresh()
msg1.refresh()
self.assertEqual(6, len(group_pigs.message_ids), 'message_process: group should contain 6 messages')
self.assertEqual(3, len(msg1.child_ids), 'message_process: msg1 should have 3 children now')
def test_30_private_discussion(self):
""" Testing private discussion between partners. """
cr, uid = self.cr, self.uid
def format(template, to='Pretty Pigs <[email protected]>, [email protected]', subject='Re: 1',
extra='', email_from='Sylvie Lelitre <[email protected]>',
msg_id='<[email protected]>'):
return template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
# Do: Raoul writes to Bert and Administrator, with a thread_model in context that should not be taken into account
msg1_pids = [self.partner_admin_id, self.partner_bert_id]
msg1_id = self.mail_thread.message_post(
cr, self.user_raoul_id, False,
partner_ids=msg1_pids,
subtype='mail.mt_comment',
context={'thread_model': 'mail.group'}
)
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg1_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = msg1_pids
test_nids = msg1_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients')
self.assertEqual(msg.model, False,
'message_post: private discussion: context key "thread_model" not correctly ignored when having no res_id')
# Test: message-id
self.assertIn('openerp-private', msg.message_id,
'message_post: private discussion: message-id should contain the private keyword')
# Do: Bert replies through mailgateway (is a customer)
reply_message = format(MAIL_TEMPLATE, to='[email protected]',
email_from='[email protected]',
extra='In-Reply-To: %s' % msg.message_id,
msg_id='<[email protected]>')
self.mail_thread.message_process(cr, uid, None, reply_message)
# Test: last mail_message created
msg2_id = self.mail_message.search(cr, uid, [], limit=1)[0]
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg2_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_admin_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(msg.author_id.id, self.partner_bert_id,
'message_post: private discussion: wrong author through mailgateway based on email')
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
# Do: Bert replies through chatter (is a customer)
msg3_id = self.mail_thread.message_post(
cr, uid, False,
author_id=self.partner_bert_id,
parent_id=msg1_id, subtype='mail.mt_comment')
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg3_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_admin_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
# Do: Administrator replies
msg3_id = self.mail_thread.message_post(cr, uid, False, parent_id=msg3_id, subtype='mail.mt_comment')
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg3_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_bert_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
| agpl-3.0 |
DxCx/nzbToMedia | libs/unidecode/x097.py | 252 | 4643 | data = (
'Xu ', # 0x00
'Ji ', # 0x01
'Mu ', # 0x02
'Chen ', # 0x03
'Xiao ', # 0x04
'Zha ', # 0x05
'Ting ', # 0x06
'Zhen ', # 0x07
'Pei ', # 0x08
'Mei ', # 0x09
'Ling ', # 0x0a
'Qi ', # 0x0b
'Chou ', # 0x0c
'Huo ', # 0x0d
'Sha ', # 0x0e
'Fei ', # 0x0f
'Weng ', # 0x10
'Zhan ', # 0x11
'Yin ', # 0x12
'Ni ', # 0x13
'Chou ', # 0x14
'Tun ', # 0x15
'Lin ', # 0x16
'[?] ', # 0x17
'Dong ', # 0x18
'Ying ', # 0x19
'Wu ', # 0x1a
'Ling ', # 0x1b
'Shuang ', # 0x1c
'Ling ', # 0x1d
'Xia ', # 0x1e
'Hong ', # 0x1f
'Yin ', # 0x20
'Mo ', # 0x21
'Mai ', # 0x22
'Yun ', # 0x23
'Liu ', # 0x24
'Meng ', # 0x25
'Bin ', # 0x26
'Wu ', # 0x27
'Wei ', # 0x28
'Huo ', # 0x29
'Yin ', # 0x2a
'Xi ', # 0x2b
'Yi ', # 0x2c
'Ai ', # 0x2d
'Dan ', # 0x2e
'Deng ', # 0x2f
'Xian ', # 0x30
'Yu ', # 0x31
'Lu ', # 0x32
'Long ', # 0x33
'Dai ', # 0x34
'Ji ', # 0x35
'Pang ', # 0x36
'Yang ', # 0x37
'Ba ', # 0x38
'Pi ', # 0x39
'Wei ', # 0x3a
'[?] ', # 0x3b
'Xi ', # 0x3c
'Ji ', # 0x3d
'Mai ', # 0x3e
'Meng ', # 0x3f
'Meng ', # 0x40
'Lei ', # 0x41
'Li ', # 0x42
'Huo ', # 0x43
'Ai ', # 0x44
'Fei ', # 0x45
'Dai ', # 0x46
'Long ', # 0x47
'Ling ', # 0x48
'Ai ', # 0x49
'Feng ', # 0x4a
'Li ', # 0x4b
'Bao ', # 0x4c
'[?] ', # 0x4d
'He ', # 0x4e
'He ', # 0x4f
'Bing ', # 0x50
'Qing ', # 0x51
'Qing ', # 0x52
'Jing ', # 0x53
'Tian ', # 0x54
'Zhen ', # 0x55
'Jing ', # 0x56
'Cheng ', # 0x57
'Qing ', # 0x58
'Jing ', # 0x59
'Jing ', # 0x5a
'Dian ', # 0x5b
'Jing ', # 0x5c
'Tian ', # 0x5d
'Fei ', # 0x5e
'Fei ', # 0x5f
'Kao ', # 0x60
'Mi ', # 0x61
'Mian ', # 0x62
'Mian ', # 0x63
'Pao ', # 0x64
'Ye ', # 0x65
'Tian ', # 0x66
'Hui ', # 0x67
'Ye ', # 0x68
'Ge ', # 0x69
'Ding ', # 0x6a
'Cha ', # 0x6b
'Jian ', # 0x6c
'Ren ', # 0x6d
'Di ', # 0x6e
'Du ', # 0x6f
'Wu ', # 0x70
'Ren ', # 0x71
'Qin ', # 0x72
'Jin ', # 0x73
'Xue ', # 0x74
'Niu ', # 0x75
'Ba ', # 0x76
'Yin ', # 0x77
'Sa ', # 0x78
'Na ', # 0x79
'Mo ', # 0x7a
'Zu ', # 0x7b
'Da ', # 0x7c
'Ban ', # 0x7d
'Yi ', # 0x7e
'Yao ', # 0x7f
'Tao ', # 0x80
'Tuo ', # 0x81
'Jia ', # 0x82
'Hong ', # 0x83
'Pao ', # 0x84
'Yang ', # 0x85
'Tomo ', # 0x86
'Yin ', # 0x87
'Jia ', # 0x88
'Tao ', # 0x89
'Ji ', # 0x8a
'Xie ', # 0x8b
'An ', # 0x8c
'An ', # 0x8d
'Hen ', # 0x8e
'Gong ', # 0x8f
'Kohaze ', # 0x90
'Da ', # 0x91
'Qiao ', # 0x92
'Ting ', # 0x93
'Wan ', # 0x94
'Ying ', # 0x95
'Sui ', # 0x96
'Tiao ', # 0x97
'Qiao ', # 0x98
'Xuan ', # 0x99
'Kong ', # 0x9a
'Beng ', # 0x9b
'Ta ', # 0x9c
'Zhang ', # 0x9d
'Bing ', # 0x9e
'Kuo ', # 0x9f
'Ju ', # 0xa0
'La ', # 0xa1
'Xie ', # 0xa2
'Rou ', # 0xa3
'Bang ', # 0xa4
'Yi ', # 0xa5
'Qiu ', # 0xa6
'Qiu ', # 0xa7
'He ', # 0xa8
'Xiao ', # 0xa9
'Mu ', # 0xaa
'Ju ', # 0xab
'Jian ', # 0xac
'Bian ', # 0xad
'Di ', # 0xae
'Jian ', # 0xaf
'On ', # 0xb0
'Tao ', # 0xb1
'Gou ', # 0xb2
'Ta ', # 0xb3
'Bei ', # 0xb4
'Xie ', # 0xb5
'Pan ', # 0xb6
'Ge ', # 0xb7
'Bi ', # 0xb8
'Kuo ', # 0xb9
'Tang ', # 0xba
'Lou ', # 0xbb
'Gui ', # 0xbc
'Qiao ', # 0xbd
'Xue ', # 0xbe
'Ji ', # 0xbf
'Jian ', # 0xc0
'Jiang ', # 0xc1
'Chan ', # 0xc2
'Da ', # 0xc3
'Huo ', # 0xc4
'Xian ', # 0xc5
'Qian ', # 0xc6
'Du ', # 0xc7
'Wa ', # 0xc8
'Jian ', # 0xc9
'Lan ', # 0xca
'Wei ', # 0xcb
'Ren ', # 0xcc
'Fu ', # 0xcd
'Mei ', # 0xce
'Juan ', # 0xcf
'Ge ', # 0xd0
'Wei ', # 0xd1
'Qiao ', # 0xd2
'Han ', # 0xd3
'Chang ', # 0xd4
'[?] ', # 0xd5
'Rou ', # 0xd6
'Xun ', # 0xd7
'She ', # 0xd8
'Wei ', # 0xd9
'Ge ', # 0xda
'Bei ', # 0xdb
'Tao ', # 0xdc
'Gou ', # 0xdd
'Yun ', # 0xde
'[?] ', # 0xdf
'Bi ', # 0xe0
'Wei ', # 0xe1
'Hui ', # 0xe2
'Du ', # 0xe3
'Wa ', # 0xe4
'Du ', # 0xe5
'Wei ', # 0xe6
'Ren ', # 0xe7
'Fu ', # 0xe8
'Han ', # 0xe9
'Wei ', # 0xea
'Yun ', # 0xeb
'Tao ', # 0xec
'Jiu ', # 0xed
'Jiu ', # 0xee
'Xian ', # 0xef
'Xie ', # 0xf0
'Xian ', # 0xf1
'Ji ', # 0xf2
'Yin ', # 0xf3
'Za ', # 0xf4
'Yun ', # 0xf5
'Shao ', # 0xf6
'Le ', # 0xf7
'Peng ', # 0xf8
'Heng ', # 0xf9
'Ying ', # 0xfa
'Yun ', # 0xfb
'Peng ', # 0xfc
'Yin ', # 0xfd
'Yin ', # 0xfe
'Xiang ', # 0xff
)
| gpl-3.0 |
eonpatapon/neutron | neutron/db/db_base_plugin_common.py | 2 | 11824 | # Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_config import cfg
from oslo_log import log as logging
from sqlalchemy.orm import exc
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.db import common_db_mixin
from neutron.db import models_v2
LOG = logging.getLogger(__name__)
class DbBasePluginCommon(common_db_mixin.CommonDbMixin):
"""Stores getters and helper methods for db_base_plugin_v2
All private getters and simple helpers like _make_*_dict were moved from
db_base_plugin_v2.
More complicated logic and public methods left in db_base_plugin_v2.
Main purpose of this class is to make getters accessible for Ipam
backends.
"""
@staticmethod
def _generate_mac():
return utils.get_random_mac(cfg.CONF.base_mac.split(':'))
@staticmethod
def _delete_ip_allocation(context, network_id, subnet_id, ip_address):
# Delete the IP address from the IPAllocate table
LOG.debug("Delete allocated IP %(ip_address)s "
"(%(network_id)s/%(subnet_id)s)",
{'ip_address': ip_address,
'network_id': network_id,
'subnet_id': subnet_id})
context.session.query(models_v2.IPAllocation).filter_by(
network_id=network_id,
ip_address=ip_address,
subnet_id=subnet_id).delete()
@staticmethod
def _store_ip_allocation(context, ip_address, network_id, subnet_id,
port_id):
LOG.debug("Allocated IP %(ip_address)s "
"(%(network_id)s/%(subnet_id)s/%(port_id)s)",
{'ip_address': ip_address,
'network_id': network_id,
'subnet_id': subnet_id,
'port_id': port_id})
allocated = models_v2.IPAllocation(
network_id=network_id,
port_id=port_id,
ip_address=ip_address,
subnet_id=subnet_id
)
context.session.add(allocated)
def _make_subnet_dict(self, subnet, fields=None, context=None):
res = {'id': subnet['id'],
'name': subnet['name'],
'tenant_id': subnet['tenant_id'],
'network_id': subnet['network_id'],
'ip_version': subnet['ip_version'],
'cidr': subnet['cidr'],
'subnetpool_id': subnet.get('subnetpool_id'),
'allocation_pools': [{'start': pool['first_ip'],
'end': pool['last_ip']}
for pool in subnet['allocation_pools']],
'gateway_ip': subnet['gateway_ip'],
'enable_dhcp': subnet['enable_dhcp'],
'ipv6_ra_mode': subnet['ipv6_ra_mode'],
'ipv6_address_mode': subnet['ipv6_address_mode'],
'dns_nameservers': [dns['address']
for dns in subnet['dns_nameservers']],
'host_routes': [{'destination': route['destination'],
'nexthop': route['nexthop']}
for route in subnet['routes']],
}
# The shared attribute for a subnet is the same as its parent network
res['shared'] = self._make_network_dict(subnet.networks,
context=context)['shared']
# Call auxiliary extend functions, if any
self._apply_dict_extend_functions(attributes.SUBNETS, res, subnet)
return self._fields(res, fields)
def _make_subnetpool_dict(self, subnetpool, fields=None):
default_prefixlen = str(subnetpool['default_prefixlen'])
min_prefixlen = str(subnetpool['min_prefixlen'])
max_prefixlen = str(subnetpool['max_prefixlen'])
res = {'id': subnetpool['id'],
'name': subnetpool['name'],
'tenant_id': subnetpool['tenant_id'],
'default_prefixlen': default_prefixlen,
'min_prefixlen': min_prefixlen,
'max_prefixlen': max_prefixlen,
'shared': subnetpool['shared'],
'prefixes': [prefix['cidr']
for prefix in subnetpool['prefixes']],
'ip_version': subnetpool['ip_version'],
'default_quota': subnetpool['default_quota']}
return self._fields(res, fields)
def _make_port_dict(self, port, fields=None,
process_extensions=True):
res = {"id": port["id"],
'name': port['name'],
"network_id": port["network_id"],
'tenant_id': port['tenant_id'],
"mac_address": port["mac_address"],
"admin_state_up": port["admin_state_up"],
"status": port["status"],
"fixed_ips": [{'subnet_id': ip["subnet_id"],
'ip_address': ip["ip_address"]}
for ip in port["fixed_ips"]],
"device_id": port["device_id"],
"device_owner": port["device_owner"]}
# Call auxiliary extend functions, if any
if process_extensions:
self._apply_dict_extend_functions(
attributes.PORTS, res, port)
return self._fields(res, fields)
def _get_network(self, context, id):
try:
network = self._get_by_id(context, models_v2.Network, id)
except exc.NoResultFound:
raise n_exc.NetworkNotFound(net_id=id)
return network
def _get_subnet(self, context, id):
try:
subnet = self._get_by_id(context, models_v2.Subnet, id)
except exc.NoResultFound:
raise n_exc.SubnetNotFound(subnet_id=id)
return subnet
def _get_subnetpool(self, context, id):
try:
return self._get_by_id(context, models_v2.SubnetPool, id)
except exc.NoResultFound:
raise n_exc.SubnetPoolNotFound(subnetpool_id=id)
def _get_all_subnetpools(self, context):
# NOTE(tidwellr): see note in _get_all_subnets()
return context.session.query(models_v2.SubnetPool).all()
def _get_port(self, context, id):
try:
port = self._get_by_id(context, models_v2.Port, id)
except exc.NoResultFound:
raise n_exc.PortNotFound(port_id=id)
return port
def _get_dns_by_subnet(self, context, subnet_id):
dns_qry = context.session.query(models_v2.DNSNameServer)
return dns_qry.filter_by(subnet_id=subnet_id).all()
def _get_route_by_subnet(self, context, subnet_id):
route_qry = context.session.query(models_v2.SubnetRoute)
return route_qry.filter_by(subnet_id=subnet_id).all()
def _get_router_gw_ports_by_network(self, context, network_id):
port_qry = context.session.query(models_v2.Port)
return port_qry.filter_by(network_id=network_id,
device_owner=constants.DEVICE_OWNER_ROUTER_GW).all()
def _get_subnets_by_network(self, context, network_id):
subnet_qry = context.session.query(models_v2.Subnet)
return subnet_qry.filter_by(network_id=network_id).all()
def _get_subnets_by_subnetpool(self, context, subnetpool_id):
subnet_qry = context.session.query(models_v2.Subnet)
return subnet_qry.filter_by(subnetpool_id=subnetpool_id).all()
def _get_all_subnets(self, context):
# NOTE(salvatore-orlando): This query might end up putting
# a lot of stress on the db. Consider adding a cache layer
return context.session.query(models_v2.Subnet).all()
def _get_subnets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'subnet', limit, marker)
make_subnet_dict = functools.partial(self._make_subnet_dict,
context=context)
return self._get_collection(context, models_v2.Subnet,
make_subnet_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def _make_network_dict(self, network, fields=None,
process_extensions=True, context=None):
res = {'id': network['id'],
'name': network['name'],
'tenant_id': network['tenant_id'],
'admin_state_up': network['admin_state_up'],
'mtu': network.get('mtu', constants.DEFAULT_NETWORK_MTU),
'status': network['status'],
'subnets': [subnet['id']
for subnet in network['subnets']]}
# The shared attribute for a network now reflects if the network
# is shared to the calling tenant via an RBAC entry.
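# e.g. (illustrative, not from the original code) an RBAC entry with
# action 'access_as_shared' and target_tenant '*' -- or the calling
# tenant's id -- marks the network as shared in the loop below.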
shared = False
matches = ('*',) + ((context.tenant_id,) if context else ())
for entry in network.rbac_entries:
if (entry.action == 'access_as_shared' and
entry.target_tenant in matches):
shared = True
break
res['shared'] = shared
# TODO(pritesh): Move vlan_transparent to the extension module.
# vlan_transparent here is only added if the vlantransparent
# extension is enabled.
if ('vlan_transparent' in network and network['vlan_transparent'] !=
attributes.ATTR_NOT_SPECIFIED):
res['vlan_transparent'] = network['vlan_transparent']
# Call auxiliary extend functions, if any
if process_extensions:
self._apply_dict_extend_functions(
attributes.NETWORKS, res, network)
return self._fields(res, fields)
def _make_subnet_args(self, detail, subnet, subnetpool_id):
gateway_ip = str(detail.gateway_ip) if detail.gateway_ip else None
args = {'tenant_id': detail.tenant_id,
'id': detail.subnet_id,
'name': subnet['name'],
'network_id': subnet['network_id'],
'ip_version': subnet['ip_version'],
'cidr': str(detail.subnet_cidr),
'subnetpool_id': subnetpool_id,
'enable_dhcp': subnet['enable_dhcp'],
'gateway_ip': gateway_ip}
if subnet['ip_version'] == 6 and subnet['enable_dhcp']:
if attributes.is_attr_set(subnet['ipv6_ra_mode']):
args['ipv6_ra_mode'] = subnet['ipv6_ra_mode']
if attributes.is_attr_set(subnet['ipv6_address_mode']):
args['ipv6_address_mode'] = subnet['ipv6_address_mode']
return args
def _make_fixed_ip_dict(self, ips):
# Excludes from dict all keys except subnet_id and ip_address
return [{'subnet_id': ip["subnet_id"],
'ip_address': ip["ip_address"]}
for ip in ips]
| apache-2.0 |
cubicova17/annet | venv/lib/python2.7/site-packages/south/management/commands/convert_to_south.py | 24 | 3676 | """
Quick conversion command module.
"""
from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.conf import settings
from django.db import models
from django.core import management
from django.core.exceptions import ImproperlyConfigured
from south.migration import Migrations
from south.hacks import hacks
from south.exceptions import NoMigrations
class Command(BaseCommand):
option_list = BaseCommand.option_list
if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
option_list += (
make_option('--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
)
option_list += (
make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False,
help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."),
make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False,
help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."),
)
help = "Quickly converts the named application to use South if it is currently using syncdb."
def handle(self, app=None, *args, **options):
# Make sure we have an app
if not app:
print "Please specify an app to convert."
return
# See if the app exists
app = app.split(".")[-1]
try:
app_module = models.get_app(app)
except ImproperlyConfigured:
print "There is no enabled application matching '%s'." % app
return
# Try to get its list of models
model_list = models.get_models(app_module)
if not model_list:
print "This application has no models; this command is for applications that already have models syncdb'd."
print "Make some models, and then use ./manage.py schemamigration %s --initial instead." % app
return
# Ask South if it thinks it's already got migrations
try:
Migrations(app)
except NoMigrations:
pass
else:
print "This application is already managed by South."
return
# Finally! It seems we've got a candidate, so do the two-command trick
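# For reference (illustrative, assuming an app named "myapp"), the trick
# below is equivalent to running:
#   ./manage.py schemamigration myapp --initial
#   ./manage.py migrate myapp 0001 --fake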
verbosity = int(options.get('verbosity', 0))
management.call_command("schemamigration", app, initial=True, verbosity=verbosity)
# Now, we need to re-clean and sanitise appcache
hacks.clear_app_cache()
hacks.repopulate_app_cache()
# And also clear our cached Migration classes
Migrations._clear_cache()
# Now, migrate
management.call_command(
"migrate",
app,
"0001",
fake=True,
verbosity=verbosity,
ignore_ghosts=options.get("ignore_ghosts", False),
delete_ghosts=options.get("delete_ghosts", False),
)
print
print "App '%s' converted. Note that South assumed the application's models matched the database" % app
print "(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app
print "directory, revert models.py so it matches the database, and try again."
| mit |
sdague/home-assistant | homeassistant/helpers/template.py | 2 | 43562 | """Template helper methods for rendering strings with Home Assistant data."""
from ast import literal_eval
import asyncio
import base64
import collections.abc
from datetime import datetime, timedelta
from functools import partial, wraps
import json
import logging
import math
from operator import attrgetter
import random
import re
from typing import Any, Dict, Generator, Iterable, Optional, Type, Union
from urllib.parse import urlencode as urllib_urlencode
import weakref
import jinja2
from jinja2 import contextfilter, contextfunction
from jinja2.sandbox import ImmutableSandboxedEnvironment
from jinja2.utils import Namespace # type: ignore
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
LENGTH_METERS,
STATE_UNKNOWN,
)
from homeassistant.core import State, callback, split_entity_id, valid_entity_id
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import location as loc_helper
from homeassistant.helpers.typing import HomeAssistantType, TemplateVarsType
from homeassistant.loader import bind_hass
from homeassistant.util import convert, dt as dt_util, location as loc_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.thread import ThreadWithException
# mypy: allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs, no-warn-return-any
_LOGGER = logging.getLogger(__name__)
_SENTINEL = object()
DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S"
_RENDER_INFO = "template.render_info"
_ENVIRONMENT = "template.environment"
_RE_JINJA_DELIMITERS = re.compile(r"\{%|\{\{|\{#")
_RESERVED_NAMES = {"contextfunction", "evalcontextfunction", "environmentfunction"}
_GROUP_DOMAIN_PREFIX = "group."
_COLLECTABLE_STATE_ATTRIBUTES = {
"state",
"attributes",
"last_changed",
"last_updated",
"context",
"domain",
"object_id",
"name",
}
ALL_STATES_RATE_LIMIT = timedelta(minutes=1)
DOMAIN_STATES_RATE_LIMIT = timedelta(seconds=1)
@bind_hass
def attach(hass: HomeAssistantType, obj: Any) -> None:
"""Recursively attach hass to all template instances in list and dict."""
if isinstance(obj, list):
for child in obj:
attach(hass, child)
elif isinstance(obj, collections.abc.Mapping):
for child_key, child_value in obj.items():
attach(hass, child_key)
attach(hass, child_value)
elif isinstance(obj, Template):
obj.hass = hass
def render_complex(value: Any, variables: TemplateVarsType = None) -> Any:
"""Recursive template creator helper function."""
if isinstance(value, list):
return [render_complex(item, variables) for item in value]
if isinstance(value, collections.abc.Mapping):
return {
render_complex(key, variables): render_complex(item, variables)
for key, item in value.items()
}
if isinstance(value, Template):
return value.async_render(variables)
return value
def is_complex(value: Any) -> bool:
"""Test if data structure is a complex template."""
if isinstance(value, Template):
return True
if isinstance(value, list):
return any(is_complex(val) for val in value)
if isinstance(value, collections.abc.Mapping):
return any(is_complex(val) for val in value.keys()) or any(
is_complex(val) for val in value.values()
)
return False
def is_template_string(maybe_template: str) -> bool:
"""Check if the input is a Jinja2 template."""
return _RE_JINJA_DELIMITERS.search(maybe_template) is not None
class ResultWrapper:
"""Result wrapper class to store render result."""
render_result: Optional[str]
def gen_result_wrapper(kls):
"""Generate a result wrapper."""
class Wrapper(kls, ResultWrapper):
"""Wrapper of a kls that can store render_result."""
def __init__(self, *args: tuple, render_result: Optional[str] = None) -> None:
super().__init__(*args)
self.render_result = render_result
def __str__(self) -> str:
if self.render_result is None:
# Can't get set repr to work
if kls is set:
return str(set(self))
return kls.__str__(self)
return self.render_result
return Wrapper
class TupleWrapper(tuple, ResultWrapper):
"""Wrap a tuple."""
# This is all magic to be allowed to subclass a tuple.
def __new__(
cls, value: tuple, *, render_result: Optional[str] = None
) -> "TupleWrapper":
"""Create a new tuple class."""
return super().__new__(cls, tuple(value))
# pylint: disable=super-init-not-called
def __init__(self, value: tuple, *, render_result: Optional[str] = None):
"""Initialize a new tuple class."""
self.render_result = render_result
def __str__(self) -> str:
"""Return string representation."""
if self.render_result is None:
return super().__str__()
return self.render_result
RESULT_WRAPPERS: Dict[Type, Type] = {
kls: gen_result_wrapper(kls) for kls in (list, dict, set)
}
RESULT_WRAPPERS[tuple] = TupleWrapper
def _true(arg: Any) -> bool:
return True
def _false(arg: Any) -> bool:
return False
class RenderInfo:
"""Holds information about a template render."""
def __init__(self, template):
"""Initialise."""
self.template = template
# Will be set sensibly once frozen.
self.filter_lifecycle = _true
self.filter = _true
self._result = None
self.is_static = False
self.exception = None
self.all_states = False
self.all_states_lifecycle = False
self.domains = set()
self.domains_lifecycle = set()
self.entities = set()
self.rate_limit = None
self.has_time = False
def __repr__(self) -> str:
"""Representation of RenderInfo."""
return f"<RenderInfo {self.template} all_states={self.all_states} all_states_lifecycle={self.all_states_lifecycle} domains={self.domains} domains_lifecycle={self.domains_lifecycle} entities={self.entities} rate_limit={self.rate_limit}> has_time={self.has_time}"
def _filter_domains_and_entities(self, entity_id: str) -> bool:
"""Template should re-render if the entity state changes when we match specific domains or entities."""
return (
split_entity_id(entity_id)[0] in self.domains or entity_id in self.entities
)
def _filter_entities(self, entity_id: str) -> bool:
"""Template should re-render if the entity state changes when we match specific entities."""
return entity_id in self.entities
def _filter_lifecycle_domains(self, entity_id: str) -> bool:
"""Template should re-render if the entity is added or removed with domains watched."""
return split_entity_id(entity_id)[0] in self.domains_lifecycle
def result(self) -> str:
"""Results of the template computation."""
if self.exception is not None:
raise self.exception
return self._result
def _freeze_static(self) -> None:
self.is_static = True
self._freeze_sets()
self.all_states = False
def _freeze_sets(self) -> None:
self.entities = frozenset(self.entities)
self.domains = frozenset(self.domains)
self.domains_lifecycle = frozenset(self.domains_lifecycle)
def _freeze(self) -> None:
self._freeze_sets()
if self.rate_limit is None:
if self.all_states or self.exception:
self.rate_limit = ALL_STATES_RATE_LIMIT
elif self.domains or self.domains_lifecycle:
self.rate_limit = DOMAIN_STATES_RATE_LIMIT
if self.exception:
return
if not self.all_states_lifecycle:
if self.domains_lifecycle:
self.filter_lifecycle = self._filter_lifecycle_domains
else:
self.filter_lifecycle = _false
if self.all_states:
return
if self.domains:
self.filter = self._filter_domains_and_entities
elif self.entities:
self.filter = self._filter_entities
else:
self.filter = _false
class Template:
"""Class to hold a template and manage caching and rendering."""
__slots__ = (
"__weakref__",
"template",
"hass",
"is_static",
"_compiled_code",
"_compiled",
)
def __init__(self, template, hass=None):
"""Instantiate a template."""
if not isinstance(template, str):
raise TypeError("Expected template to be a string")
self.template: str = template.strip()
self._compiled_code = None
self._compiled = None
self.hass = hass
self.is_static = not is_template_string(template)
@property
def _env(self):
if self.hass is None:
return _NO_HASS_ENV
ret = self.hass.data.get(_ENVIRONMENT)
if ret is None:
ret = self.hass.data[_ENVIRONMENT] = TemplateEnvironment(self.hass)
return ret
def ensure_valid(self):
"""Return if template is valid."""
if self._compiled_code is not None:
return
try:
self._compiled_code = self._env.compile(self.template)
except jinja2.TemplateError as err:
raise TemplateError(err) from err
def render(
self,
variables: TemplateVarsType = None,
parse_result: bool = True,
**kwargs: Any,
) -> Any:
"""Render given template."""
if self.is_static:
if self.hass.config.legacy_templates or not parse_result:
return self.template
return self._parse_result(self.template)
return run_callback_threadsafe(
self.hass.loop,
partial(self.async_render, variables, parse_result, **kwargs),
).result()
@callback
def async_render(
self,
variables: TemplateVarsType = None,
parse_result: bool = True,
**kwargs: Any,
) -> Any:
"""Render given template.
This method must be run in the event loop.
"""
if self.is_static:
if self.hass.config.legacy_templates or not parse_result:
return self.template
return self._parse_result(self.template)
compiled = self._compiled or self._ensure_compiled()
if variables is not None:
kwargs.update(variables)
try:
render_result = compiled.render(kwargs)
except Exception as err: # pylint: disable=broad-except
raise TemplateError(err) from err
render_result = render_result.strip()
if self.hass.config.legacy_templates or not parse_result:
return render_result
return self._parse_result(render_result)
def _parse_result(self, render_result: str) -> Any: # pylint: disable=no-self-use
"""Parse the result."""
try:
result = literal_eval(render_result)
if type(result) in RESULT_WRAPPERS:
result = RESULT_WRAPPERS[type(result)](
result, render_result=render_result
)
# If the literal_eval result is a string, use the original
# render by not returning right here: evaluating a string that
# yields another string affects quoting, so to avoid unexpected
# output we prefer the original render over the evaluated one.
if not isinstance(result, str):
return result
except (ValueError, TypeError, SyntaxError, MemoryError):
pass
return render_result
async def async_render_will_timeout(
self, timeout: float, variables: TemplateVarsType = None, **kwargs: Any
) -> bool:
"""Check to see if rendering a template will timeout during render.
This is intended to check for expensive templates
that will make the system unstable. The template
is rendered in the executor to ensure it does not
tie up the event loop.
This function is not a security control and is only
intended to be used as a safety check when testing
templates.
This method must be run in the event loop.
"""
assert self.hass
if self.is_static:
return False
compiled = self._compiled or self._ensure_compiled()
if variables is not None:
kwargs.update(variables)
finish_event = asyncio.Event()
def _render_template():
try:
compiled.render(kwargs)
except TimeoutError:
pass
finally:
run_callback_threadsafe(self.hass.loop, finish_event.set)
try:
template_render_thread = ThreadWithException(target=_render_template)
template_render_thread.start()
await asyncio.wait_for(finish_event.wait(), timeout=timeout)
except asyncio.TimeoutError:
template_render_thread.raise_exc(TimeoutError)
return True
finally:
template_render_thread.join()
return False
@callback
def async_render_to_info(
self, variables: TemplateVarsType = None, **kwargs: Any
) -> RenderInfo:
"""Render the template and collect an entity filter."""
assert self.hass and _RENDER_INFO not in self.hass.data
render_info = RenderInfo(self)
# pylint: disable=protected-access
if self.is_static:
render_info._result = self.template.strip()
render_info._freeze_static()
return render_info
self.hass.data[_RENDER_INFO] = render_info
try:
render_info._result = self.async_render(variables, **kwargs)
except TemplateError as ex:
render_info.exception = ex
finally:
del self.hass.data[_RENDER_INFO]
render_info._freeze()
return render_info
def render_with_possible_json_value(self, value, error_value=_SENTINEL):
"""Render template with value exposed.
If valid JSON will expose value_json too.
"""
if self.is_static:
return self.template
return run_callback_threadsafe(
self.hass.loop,
self.async_render_with_possible_json_value,
value,
error_value,
).result()
@callback
def async_render_with_possible_json_value(
self, value, error_value=_SENTINEL, variables=None
):
"""Render template with value exposed.
If valid JSON will expose value_json too.
This method must be run in the event loop.
"""
if self.is_static:
return self.template
if self._compiled is None:
self._ensure_compiled()
variables = dict(variables or {})
variables["value"] = value
try:
variables["value_json"] = json.loads(value)
except (ValueError, TypeError):
pass
try:
return self._compiled.render(variables).strip()
except jinja2.TemplateError as ex:
if error_value is _SENTINEL:
_LOGGER.error(
"Error parsing value: %s (value: %s, template: %s)",
ex,
value,
self.template,
)
return value if error_value is _SENTINEL else error_value
def _ensure_compiled(self):
"""Bind a template to a specific hass instance."""
self.ensure_valid()
assert self.hass is not None, "hass variable not set on template"
env = self._env
self._compiled = jinja2.Template.from_code(
env, self._compiled_code, env.globals, None
)
return self._compiled
def __eq__(self, other):
"""Compare template with another."""
return (
self.__class__ == other.__class__
and self.template == other.template
and self.hass == other.hass
)
def __hash__(self) -> int:
"""Hash code for template."""
return hash(self.template)
def __repr__(self) -> str:
"""Representation of Template."""
return 'Template("' + self.template + '")'
class AllStates:
"""Class to expose all HA states as attributes."""
def __init__(self, hass):
"""Initialize all states."""
self._hass = hass
def __getattr__(self, name):
"""Return the domain state."""
if "." in name:
return _get_state_if_valid(self._hass, name)
if name in _RESERVED_NAMES:
return None
if not valid_entity_id(f"{name}.entity"):
raise TemplateError(f"Invalid domain name '{name}'")
return DomainStates(self._hass, name)
# Jinja will try __getitem__ first and it avoids the need
# to call is_safe_attribute
__getitem__ = __getattr__
def _collect_all(self) -> None:
render_info = self._hass.data.get(_RENDER_INFO)
if render_info is not None:
render_info.all_states = True
def _collect_all_lifecycle(self) -> None:
render_info = self._hass.data.get(_RENDER_INFO)
if render_info is not None:
render_info.all_states_lifecycle = True
def __iter__(self):
"""Return all states."""
self._collect_all()
return _state_generator(self._hass, None)
def __len__(self) -> int:
"""Return number of states."""
self._collect_all_lifecycle()
return self._hass.states.async_entity_ids_count()
def __call__(self, entity_id):
"""Return the states."""
state = _get_state(self._hass, entity_id)
return STATE_UNKNOWN if state is None else state.state
def __repr__(self) -> str:
"""Representation of All States."""
return "<template AllStates>"
class DomainStates:
"""Class to expose a specific HA domain as attributes."""
def __init__(self, hass, domain):
"""Initialize the domain states."""
self._hass = hass
self._domain = domain
def __getattr__(self, name):
"""Return the states."""
return _get_state_if_valid(self._hass, f"{self._domain}.{name}")
# Jinja will try __getitem__ first and it avoids the need
# to call is_safe_attribute
__getitem__ = __getattr__
def _collect_domain(self) -> None:
entity_collect = self._hass.data.get(_RENDER_INFO)
if entity_collect is not None:
entity_collect.domains.add(self._domain)
def _collect_domain_lifecycle(self) -> None:
entity_collect = self._hass.data.get(_RENDER_INFO)
if entity_collect is not None:
entity_collect.domains_lifecycle.add(self._domain)
def __iter__(self):
"""Return the iteration over all the states."""
self._collect_domain()
return _state_generator(self._hass, self._domain)
def __len__(self) -> int:
"""Return number of states."""
self._collect_domain_lifecycle()
return self._hass.states.async_entity_ids_count(self._domain)
def __repr__(self) -> str:
"""Representation of Domain States."""
return f"<template DomainStates('{self._domain}')>"
class TemplateState(State):
"""Class to represent a state object in a template."""
__slots__ = ("_hass", "_state", "_collect")
# Inheritance is done so functions that check against State keep working
# pylint: disable=super-init-not-called
def __init__(self, hass, state, collect=True):
"""Initialize template state."""
self._hass = hass
self._state = state
self._collect = collect
def _collect_state(self):
if self._collect and _RENDER_INFO in self._hass.data:
self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)
# Jinja will try __getitem__ first and it avoids the need
# to call is_safe_attribute
def __getitem__(self, item):
"""Return a property as an attribute for jinja."""
if item in _COLLECTABLE_STATE_ATTRIBUTES:
# _collect_state inlined here for performance
if self._collect and _RENDER_INFO in self._hass.data:
self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)
return getattr(self._state, item)
if item == "entity_id":
return self._state.entity_id
if item == "state_with_unit":
return self.state_with_unit
raise KeyError
@property
def entity_id(self):
"""Wrap State.entity_id.
Intentionally does not collect state
"""
return self._state.entity_id
@property
def state(self):
"""Wrap State.state."""
self._collect_state()
return self._state.state
@property
def attributes(self):
"""Wrap State.attributes."""
self._collect_state()
return self._state.attributes
@property
def last_changed(self):
"""Wrap State.last_changed."""
self._collect_state()
return self._state.last_changed
@property
def last_updated(self):
"""Wrap State.last_updated."""
self._collect_state()
return self._state.last_updated
@property
def context(self):
"""Wrap State.context."""
self._collect_state()
return self._state.context
@property
def domain(self):
"""Wrap State.domain."""
self._collect_state()
return self._state.domain
@property
def object_id(self):
"""Wrap State.object_id."""
self._collect_state()
return self._state.object_id
@property
def name(self):
"""Wrap State.name."""
self._collect_state()
return self._state.name
@property
def state_with_unit(self) -> str:
"""Return the state concatenated with the unit if available."""
self._collect_state()
unit = self._state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
return f"{self._state.state} {unit}" if unit else self._state.state
def __eq__(self, other: Any) -> bool:
"""Ensure we collect on equality check."""
self._collect_state()
return self._state.__eq__(other)
def __repr__(self) -> str:
"""Representation of Template State."""
return f"<template TemplateState({self._state.__repr__()})>"
def _collect_state(hass: HomeAssistantType, entity_id: str) -> None:
entity_collect = hass.data.get(_RENDER_INFO)
if entity_collect is not None:
entity_collect.entities.add(entity_id)
def _state_generator(hass: HomeAssistantType, domain: Optional[str]) -> Generator:
"""State generator for a domain or all states."""
for state in sorted(hass.states.async_all(domain), key=attrgetter("entity_id")):
yield TemplateState(hass, state, collect=False)
def _get_state_if_valid(
hass: HomeAssistantType, entity_id: str
) -> Optional[TemplateState]:
state = hass.states.get(entity_id)
if state is None and not valid_entity_id(entity_id):
raise TemplateError(f"Invalid entity ID '{entity_id}'") # type: ignore
return _get_template_state_from_state(hass, entity_id, state)
def _get_state(hass: HomeAssistantType, entity_id: str) -> Optional[TemplateState]:
return _get_template_state_from_state(hass, entity_id, hass.states.get(entity_id))
def _get_template_state_from_state(
hass: HomeAssistantType, entity_id: str, state: Optional[State]
) -> Optional[TemplateState]:
if state is None:
# Only need to collect if none, if not none collect first actual
# access to the state properties in the state wrapper.
_collect_state(hass, entity_id)
return None
return TemplateState(hass, state)
def _resolve_state(
hass: HomeAssistantType, entity_id_or_state: Any
) -> Union[State, TemplateState, None]:
"""Return state or entity_id if given."""
if isinstance(entity_id_or_state, State):
return entity_id_or_state
if isinstance(entity_id_or_state, str):
return _get_state(hass, entity_id_or_state)
return None
def result_as_boolean(template_result: Optional[str]) -> bool:
"""Convert the template result to a boolean.
True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy
False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy
"""
try:
# Import here, not at top-level to avoid circular import
from homeassistant.helpers import ( # pylint: disable=import-outside-toplevel
config_validation as cv,
)
return cv.boolean(template_result)
except vol.Invalid:
return False
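# Illustrative behavior (a sketch, not part of the original module):
#   result_as_boolean("on")     # True
#   result_as_boolean("false")  # False
#   result_as_boolean(None)     # False (invalid input falls back to False)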
def expand(hass: HomeAssistantType, *args: Any) -> Iterable[State]:
"""Expand out any groups into entity states."""
search = list(args)
found = {}
while search:
entity = search.pop()
if isinstance(entity, str):
entity_id = entity
entity = _get_state(hass, entity)
if entity is None:
continue
elif isinstance(entity, State):
entity_id = entity.entity_id
elif isinstance(entity, collections.abc.Iterable):
search += entity
continue
else:
# ignore other types
continue
if entity_id.startswith(_GROUP_DOMAIN_PREFIX):
# Collect state will be called in here since it's wrapped
group_entities = entity.attributes.get(ATTR_ENTITY_ID)
if group_entities:
search += group_entities
else:
_collect_state(hass, entity_id)
found[entity_id] = entity
return sorted(found.values(), key=lambda a: a.entity_id)
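# Usage sketch (hypothetical entity IDs, not from the original module):
#   expand(hass, "group.kitchen", "light.desk")
# returns the member states of group.kitchen plus light.desk, with any
# nested groups recursively flattened and the result sorted by entity_id.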
def closest(hass, *args):
"""Find closest entity.
Closest to home:
closest(states)
closest(states.device_tracker)
closest('group.children')
closest(states.group.children)
Closest to a point:
closest(23.456, 23.456, 'group.children')
closest('zone.school', 'group.children')
closest(states.zone.school, 'group.children')
As a filter:
states | closest
states.device_tracker | closest
['group.children', states.device_tracker] | closest
'group.children' | closest(23.456, 23.456)
states.device_tracker | closest('zone.school')
'group.children' | closest(states.zone.school)
"""
if len(args) == 1:
latitude = hass.config.latitude
longitude = hass.config.longitude
entities = args[0]
elif len(args) == 2:
point_state = _resolve_state(hass, args[0])
if point_state is None:
_LOGGER.warning("Closest:Unable to find state %s", args[0])
return None
if not loc_helper.has_location(point_state):
_LOGGER.warning(
"Closest:State does not contain valid location: %s", point_state
)
return None
latitude = point_state.attributes.get(ATTR_LATITUDE)
longitude = point_state.attributes.get(ATTR_LONGITUDE)
entities = args[1]
else:
latitude = convert(args[0], float)
longitude = convert(args[1], float)
if latitude is None or longitude is None:
_LOGGER.warning(
"Closest:Received invalid coordinates: %s, %s", args[0], args[1]
)
return None
entities = args[2]
states = expand(hass, entities)
# state will already be wrapped here
return loc_helper.closest(latitude, longitude, states)
def closest_filter(hass, *args):
"""Call closest as a filter. Need to reorder arguments."""
new_args = list(args[1:])
new_args.append(args[0])
return closest(hass, *new_args)
def distance(hass, *args):
"""Calculate distance.
Will calculate distance from home to a point or between points.
Points can be passed in using state objects or lat/lng coordinates.
"""
locations = []
to_process = list(args)
while to_process:
value = to_process.pop(0)
if isinstance(value, str) and not valid_entity_id(value):
point_state = None
else:
point_state = _resolve_state(hass, value)
if point_state is None:
# We expect this and next value to be lat&lng
if not to_process:
_LOGGER.warning(
"Distance:Expected latitude and longitude, got %s", value
)
return None
value_2 = to_process.pop(0)
latitude = convert(value, float)
longitude = convert(value_2, float)
if latitude is None or longitude is None:
_LOGGER.warning(
"Distance:Unable to process latitude and longitude: %s, %s",
value,
value_2,
)
return None
else:
if not loc_helper.has_location(point_state):
_LOGGER.warning(
"distance:State does not contain valid location: %s", point_state
)
return None
latitude = point_state.attributes.get(ATTR_LATITUDE)
longitude = point_state.attributes.get(ATTR_LONGITUDE)
locations.append((latitude, longitude))
if len(locations) == 1:
return hass.config.distance(*locations[0])
return hass.config.units.length(
loc_util.distance(*locations[0] + locations[1]), LENGTH_METERS
)
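# Usage sketch (hypothetical entity IDs): distance from home to one point,
# or between two points; a point is a state with a location or a lat/lng pair:
#   distance(hass, "device_tracker.phone")
#   distance(hass, 52.52, 13.40, "zone.school")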
def is_state(hass: HomeAssistantType, entity_id: str, state: State) -> bool:
"""Test if a state is a specific value."""
state_obj = _get_state(hass, entity_id)
return state_obj is not None and state_obj.state == state
def is_state_attr(hass, entity_id, name, value):
"""Test if a state's attribute is a specific value."""
attr = state_attr(hass, entity_id, name)
return attr is not None and attr == value
def state_attr(hass, entity_id, name):
"""Get a specific attribute from a state."""
state_obj = _get_state(hass, entity_id)
if state_obj is not None:
return state_obj.attributes.get(name)
return None
def now(hass):
"""Record fetching now."""
render_info = hass.data.get(_RENDER_INFO)
if render_info is not None:
render_info.has_time = True
return dt_util.now()
def utcnow(hass):
"""Record fetching utcnow."""
render_info = hass.data.get(_RENDER_INFO)
if render_info is not None:
render_info.has_time = True
return dt_util.utcnow()
def forgiving_round(value, precision=0, method="common"):
"""Round accepted strings."""
try:
# support rounding methods like jinja
multiplier = float(10 ** precision)
if method == "ceil":
value = math.ceil(float(value) * multiplier) / multiplier
elif method == "floor":
value = math.floor(float(value) * multiplier) / multiplier
elif method == "half":
value = round(float(value) * 2) / 2
else:
# if method is common or something else, use common rounding
value = round(float(value), precision)
return int(value) if precision == 0 else value
except (ValueError, TypeError):
# If value can't be converted to float
return value
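# Illustrative behavior (a sketch, not part of the original module):
#   forgiving_round("12.34", 1)           # 12.3 (common rounding)
#   forgiving_round("12.34", 1, "ceil")   # 12.4
#   forgiving_round("1.3", 1, "half")     # 1.5 (nearest half)
#   forgiving_round("abc")                # "abc" (returned unchanged)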
def multiply(value, amount):
"""Filter to convert value to float and multiply it."""
try:
return float(value) * amount
except (ValueError, TypeError):
# If value can't be converted to float
return value
def logarithm(value, base=math.e):
"""Filter to get logarithm of the value with a specific base."""
try:
return math.log(float(value), float(base))
except (ValueError, TypeError):
return value
def sine(value):
"""Filter to get sine of the value."""
try:
return math.sin(float(value))
except (ValueError, TypeError):
return value
def cosine(value):
"""Filter to get cosine of the value."""
try:
return math.cos(float(value))
except (ValueError, TypeError):
return value
def tangent(value):
"""Filter to get tangent of the value."""
try:
return math.tan(float(value))
except (ValueError, TypeError):
return value
def arc_sine(value):
"""Filter to get arc sine of the value."""
try:
return math.asin(float(value))
except (ValueError, TypeError):
return value
def arc_cosine(value):
"""Filter to get arc cosine of the value."""
try:
return math.acos(float(value))
except (ValueError, TypeError):
return value
def arc_tangent(value):
"""Filter to get arc tangent of the value."""
try:
return math.atan(float(value))
except (ValueError, TypeError):
return value
def arc_tangent2(*args):
"""Filter to calculate four quadrant arc tangent of y / x."""
try:
if len(args) == 1 and isinstance(args[0], (list, tuple)):
args = args[0]
return math.atan2(float(args[0]), float(args[1]))
except (ValueError, TypeError):
return args
def square_root(value):
"""Filter to get square root of the value."""
try:
return math.sqrt(float(value))
except (ValueError, TypeError):
return value
def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True):
"""Filter to convert given timestamp to format."""
try:
date = dt_util.utc_from_timestamp(value)
if local:
date = dt_util.as_local(date)
return date.strftime(date_format)
except (ValueError, TypeError):
# If timestamp can't be converted
return value
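# Illustrative behavior (a sketch; exact output depends on the timezone):
#   timestamp_custom(0, "%Y-%m-%d", local=False)  # "1970-01-01"
#   timestamp_custom("bad")                       # "bad" (returned unchanged)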
def timestamp_local(value):
"""Filter to convert given timestamp to local date/time."""
try:
return dt_util.as_local(dt_util.utc_from_timestamp(value)).strftime(
DATE_STR_FORMAT
)
except (ValueError, TypeError):
# If timestamp can't be converted
return value
def timestamp_utc(value):
"""Filter to convert given timestamp to UTC date/time."""
try:
return dt_util.utc_from_timestamp(value).strftime(DATE_STR_FORMAT)
except (ValueError, TypeError):
# If timestamp can't be converted
return value
def forgiving_as_timestamp(value):
"""Try to convert value to timestamp."""
try:
return dt_util.as_timestamp(value)
except (ValueError, TypeError):
return None
def strptime(string, fmt):
"""Parse a time string to datetime."""
try:
return datetime.strptime(string, fmt)
except (ValueError, AttributeError, TypeError):
return string
def fail_when_undefined(value):
"""Filter to force a failure when the value is undefined."""
if isinstance(value, jinja2.Undefined):
value()
return value
def forgiving_float(value):
"""Try to convert value to a float."""
try:
return float(value)
except (ValueError, TypeError):
return value
def regex_match(value, find="", ignorecase=False):
"""Match value using regex."""
if not isinstance(value, str):
value = str(value)
flags = re.I if ignorecase else 0
return bool(re.match(find, value, flags))
def regex_replace(value="", find="", replace="", ignorecase=False):
"""Replace using regex."""
if not isinstance(value, str):
value = str(value)
flags = re.I if ignorecase else 0
regex = re.compile(find, flags)
return regex.sub(replace, value)
def regex_search(value, find="", ignorecase=False):
"""Search using regex."""
if not isinstance(value, str):
value = str(value)
flags = re.I if ignorecase else 0
return bool(re.search(find, value, flags))
def regex_findall_index(value, find="", index=0, ignorecase=False):
"""Find all matches using regex and then pick specific match index."""
if not isinstance(value, str):
value = str(value)
flags = re.I if ignorecase else 0
return re.findall(find, value, flags)[index]
def bitwise_and(first_value, second_value):
"""Perform a bitwise and operation."""
return first_value & second_value
def bitwise_or(first_value, second_value):
"""Perform a bitwise or operation."""
return first_value | second_value
def base64_encode(value):
"""Perform base64 encode."""
return base64.b64encode(value.encode("utf-8")).decode("utf-8")
def base64_decode(value):
"""Perform base64 denode."""
return base64.b64decode(value).decode("utf-8")
def ordinal(value):
"""Perform ordinal conversion."""
return str(value) + (
list(["th", "st", "nd", "rd"] + ["th"] * 6)[(int(str(value)[-1])) % 10]
if int(str(value)[-2:]) % 100 not in range(11, 14)
else "th"
)
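# Illustrative behavior (a sketch, not part of the original module): the last
# digit selects the suffix, except for the teens which always use "th":
#   ordinal(1)   # "1st"
#   ordinal(11)  # "11th"
#   ordinal(22)  # "22nd"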
def from_json(value):
"""Convert a JSON string to an object."""
return json.loads(value)
def to_json(value):
"""Convert an object to a JSON string."""
return json.dumps(value)
@contextfilter
def random_every_time(context, values):
"""Choose a random value.
Unlike Jinja's random filter,
this is context-dependent to avoid caching the chosen value.
"""
return random.choice(values)
def relative_time(value):
"""
Take a datetime and return its "age" as a string.
The age can be in seconds, minutes, hours, days, months or years. Only the
biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will
be returned.
If the date is in the future, it will be returned unmodified.
If the input is not a datetime object, it will be returned unmodified.
"""
if not isinstance(value, datetime):
return value
if not value.tzinfo:
value = dt_util.as_local(value)
if dt_util.now() < value:
return value
return dt_util.get_age(value)
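# Usage sketch (assumed values, not from the original module):
#   relative_time(dt_util.utcnow() - timedelta(hours=2))  # e.g. "2 hours"
#   relative_time("not a datetime")                       # returned unchanged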
def urlencode(value):
"""Urlencode dictionary and return as UTF-8 string."""
return urllib_urlencode(value).encode("utf-8")
class TemplateEnvironment(ImmutableSandboxedEnvironment):
"""The Home Assistant template environment."""
def __init__(self, hass):
"""Initialise template environment."""
super().__init__()
self.hass = hass
self.template_cache = weakref.WeakValueDictionary()
self.filters["round"] = forgiving_round
self.filters["multiply"] = multiply
self.filters["log"] = logarithm
self.filters["sin"] = sine
self.filters["cos"] = cosine
self.filters["tan"] = tangent
self.filters["asin"] = arc_sine
self.filters["acos"] = arc_cosine
self.filters["atan"] = arc_tangent
self.filters["atan2"] = arc_tangent2
self.filters["sqrt"] = square_root
self.filters["as_timestamp"] = forgiving_as_timestamp
self.filters["as_local"] = dt_util.as_local
self.filters["timestamp_custom"] = timestamp_custom
self.filters["timestamp_local"] = timestamp_local
self.filters["timestamp_utc"] = timestamp_utc
self.filters["to_json"] = to_json
self.filters["from_json"] = from_json
self.filters["is_defined"] = fail_when_undefined
self.filters["max"] = max
self.filters["min"] = min
self.filters["random"] = random_every_time
self.filters["base64_encode"] = base64_encode
self.filters["base64_decode"] = base64_decode
self.filters["ordinal"] = ordinal
self.filters["regex_match"] = regex_match
self.filters["regex_replace"] = regex_replace
self.filters["regex_search"] = regex_search
self.filters["regex_findall_index"] = regex_findall_index
self.filters["bitwise_and"] = bitwise_and
self.filters["bitwise_or"] = bitwise_or
self.filters["ord"] = ord
self.globals["log"] = logarithm
self.globals["sin"] = sine
self.globals["cos"] = cosine
self.globals["tan"] = tangent
self.globals["sqrt"] = square_root
self.globals["pi"] = math.pi
self.globals["tau"] = math.pi * 2
self.globals["e"] = math.e
self.globals["asin"] = arc_sine
self.globals["acos"] = arc_cosine
self.globals["atan"] = arc_tangent
self.globals["atan2"] = arc_tangent2
self.globals["float"] = forgiving_float
self.globals["as_local"] = dt_util.as_local
self.globals["as_timestamp"] = forgiving_as_timestamp
self.globals["relative_time"] = relative_time
self.globals["timedelta"] = timedelta
self.globals["strptime"] = strptime
self.globals["urlencode"] = urlencode
if hass is None:
return
# We mark these as context functions to ensure they get
# evaluated fresh with every execution, rather than executed
# at compile time and the value stored. The context itself
# can be discarded, we only need to get at the hass object.
def hassfunction(func):
"""Wrap function that depend on hass."""
@wraps(func)
def wrapper(*args, **kwargs):
return func(hass, *args[1:], **kwargs)
return contextfunction(wrapper)
self.globals["expand"] = hassfunction(expand)
self.filters["expand"] = contextfilter(self.globals["expand"])
self.globals["closest"] = hassfunction(closest)
self.filters["closest"] = contextfilter(hassfunction(closest_filter))
self.globals["distance"] = hassfunction(distance)
self.globals["is_state"] = hassfunction(is_state)
self.globals["is_state_attr"] = hassfunction(is_state_attr)
self.globals["state_attr"] = hassfunction(state_attr)
self.globals["states"] = AllStates(hass)
self.globals["utcnow"] = hassfunction(utcnow)
self.globals["now"] = hassfunction(now)
def is_safe_callable(self, obj):
"""Test if callback is safe."""
return isinstance(obj, AllStates) or super().is_safe_callable(obj)
def is_safe_attribute(self, obj, attr, value):
"""Test if attribute is safe."""
if isinstance(obj, (AllStates, DomainStates, TemplateState)):
return not attr[0] == "_"
if isinstance(obj, Namespace):
return True
return super().is_safe_attribute(obj, attr, value)
def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
"""Compile the template."""
if (
name is not None
or filename is not None
or raw is not False
or defer_init is not False
):
            # If there are any non-default keyword args, we do
            # not cache. In production we currently do not have
# any instance of this.
return super().compile(source, name, filename, raw, defer_init)
cached = self.template_cache.get(source)
if cached is None:
cached = self.template_cache[source] = super().compile(source)
return cached
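# Illustrative (a sketch): identical template sources share one compiled
# object via the WeakValueDictionary cache above, so for a configured `env`
#   env.compile("{{ 1 + 1 }}") is env.compile("{{ 1 + 1 }}")  # -> True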
_NO_HASS_ENV = TemplateEnvironment(None)
| apache-2.0 |
vathpela/anaconda | tests/glade/check_mnemonics.py | 8 | 2031 | #
# Copyright (C) 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gladecheck import GladeTest
class CheckMnemonics(GladeTest):
def checkGlade(self, glade_tree):
"""Check for widgets with keyboard accelerators but no mnemonic"""
# Look for labels with use-underline=True and no mnemonic-widget
for label in glade_tree.xpath(".//object[@class='GtkLabel' and ./property[@name='use_underline' and ./text() = 'True'] and not(./property[@name='mnemonic_widget'])]"):
# And now filter out the cases where the label actually does have a mnemonic.
# This list is not comprehensive, probably.
parent = label.getparent()
# Is the label the child of a GtkButton? The button might be pretty far up there.
# Assume widget names that end in "Button" are subclasses of GtkButton
if parent.tag == 'child' and \
label.xpath("ancestor::object[substring(@class, string-length(@class) - string-length('Button') + 1) = 'Button']"):
continue
# Is the label a GtkNotebook tab?
if parent.tag == 'child' and parent.get('type') == 'tab' and \
parent.getparent().get('class') == 'GtkNotebook':
continue
raise AssertionError("Label with accelerator and no mnemonic at %s:%d" % (label.base, label.sourceline))
| gpl-2.0 |
altaf-ali/luigi | setup.py | 15 | 2810 | # Copyright (c) 2012 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def get_static_files(path):
return [os.path.join(dirpath.replace("luigi/", ""), ext)
for (dirpath, dirnames, filenames) in os.walk(path)
for ext in ["*.html", "*.js", "*.css", "*.png"]]
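# Illustrative (a sketch): for a tree containing luigi/static/css/, this
# yields glob patterns such as "static/css/*.css", i.e. paths relative to
# the luigi package, suitable for the package_data argument below.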
luigi_package_data = sum(map(get_static_files, ["luigi/static", "luigi/templates"]), [])
readme_note = """\
.. note::
For the latest source, discussion, etc, please visit the
`GitHub repository <https://github.com/spotify/luigi>`_\n\n
"""
with open('README.rst') as fobj:
long_description = readme_note + fobj.read()
install_requires = [
'cached_property',
'pyparsing',
'tornado',
'python-daemon',
]
if os.environ.get('READTHEDOCS', None) == 'True':
install_requires.append('sqlalchemy')
# So that we can build documentation for luigi.db_task_history and luigi.contrib.sqla
setup(
name='luigi',
version='1.3.0',
    description='Workflow mgmt + task scheduling + dependency resolution',
long_description=long_description,
author='Erik Bernhardsson',
author_email='[email protected]',
url='https://github.com/spotify/luigi',
license='Apache License 2.0',
packages=[
'luigi',
'luigi.contrib',
'luigi.contrib.hdfs',
'luigi.tools'
],
package_data={
'luigi': luigi_package_data
},
entry_points={
'console_scripts': [
'luigi = luigi.cmdline:luigi_run',
'luigid = luigi.cmdline:luigid',
'luigi-grep = luigi.tools.luigi_grep:main',
'luigi-deps = luigi.tools.deps:main',
]
},
install_requires=install_requires,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Monitoring',
],
)
| apache-2.0 |
lino-framework/xl | lino_xl/lib/humanlinks/choicelists.py | 1 | 4263 | # -*- coding: UTF-8 -*-
# Copyright 2014-2015 Rumma & Ko Ltd
#
# License: GNU Affero General Public License v3 (see file COPYING for details)
"""
Choicelists for `lino_xl.lib.humanlinks`.
"""
from __future__ import unicode_literals
from __future__ import print_function
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy as pgettext
from django.utils.text import format_lazy
from lino_xl.lib.contacts.roles import ContactsStaff
from lino.api import dd
class LinkType(dd.Choice):
symmetric = False
def __init__(self, value, name,
mptext, fptext,
mctext, fctext,
**kw):
        self.mptext = mptext  # male parent
        self.fptext = fptext  # female parent
        self.mctext = mctext  # male child
        self.fctext = fctext  # female child
# text = string_concat(
# mptext, ' (', fptext, ') / ', mctext, ' (', fctext, ')')
# text = string_concat(mctext, ' (', fctext, ')')
        text = format_lazy(u"{}({})", mptext, fptext)
# text = "%s (%s) / %s (%s)" % (mptext, fptext, mctext, fctext)
super(LinkType, self).__init__(value, text, name, **kw)
def as_parent(self, human):
if human is None:
return self.text
return human.mf(self.mptext, self.fptext)
def as_child(self, human):
if human is None:
return self.text
return human.mf(self.mctext, self.fctext)
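    # Illustrative (a sketch, assuming Human.mf() selects the male/female
    # variant): for LinkTypes.parent, as_parent(a_man) -> "Father" and
    # as_child(a_woman) -> "Daughter".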
class LinkTypes(dd.ChoiceList):
"""The global list of human link types. This is used as choicelist
for the :attr:`type <lino_xl.lib.humanlinks.models.Link.type>`
field of a human link.
The default list contains the following data:
.. django2rst::
rt.show(humanlinks.LinkTypes)
.. attribute:: adoptive_parent
A person who adopts a child of other parents as his or her own child.
.. attribute:: stepparent
Someone that your mother or father marries after the marriage
to or relationship with your other parent has ended
.. attribute:: foster_parent
A man (woman) who looks after or brings up a child or children
as a father (mother), in place of the natural or adoptive
father (mother). [`thefreedictionary
<http://www.thefreedictionary.com/foster+father>`_]
"""
required_roles = dd.login_required(ContactsStaff)
verbose_name = _("Parency type")
verbose_name_plural = _("Parency types")
item_class = LinkType
add = LinkTypes.add_item
add('01', 'parent',
_("Father"), _("Mother"),
_("Son"), _("Daughter"))
add('02',
'adoptive_parent',
_("Adoptive father"), _("Adoptive mother"),
_("Adopted son"), _("Adopted daughter"))
add('03',
'grandparent',
_("Grandfather"), _("Grandmother"),
_("Grandson"), _("Granddaughter"))
add('05',
'spouse',
_("Husband"), _("Wife"),
_("Husband"), _("Wife"), symmetric=True)
add('06',
'friend',
pgettext("male", "Friend"), pgettext("female", "Friend"),
pgettext("male", "Friend"), pgettext("female", "Friend"),
symmetric=True)
add('07',
'partner',
pgettext("male", "Partner"), pgettext("female", "Partner"),
pgettext("male", "Partner"), pgettext("female", "Partner"),
symmetric=True)
add('08',
'stepparent',
_("Stepfather"), _("Stepmother"),
_("Stepson"), _("Stepdaughter"))
add('09',
'foster_parent',
_("Foster father"), _("Foster mother"),
_("Foster son"), _("Foster daughter"))
add('10',
'sibling',
pgettext("male", "Brother"), pgettext("female", "Sister"),
pgettext("male", "Brother"), pgettext("female", "Sister"),
symmetric=True)
add('11',
'cousin',
pgettext("male", "Cousin"), pgettext("female", "Cousin"),
pgettext("male", "Cousin"), pgettext("female", "Cousin"),
symmetric=True)
add('12',
'uncle',
_("Uncle"), _("Aunt"),
_("Nephew"), _("Niece"))
add('80',
'relative',
pgettext("male", "Relative"), pgettext("female", "Relative"),
pgettext("male", "Relative"), pgettext("female", "Relative"),
symmetric=True)
add('90',
'other',
pgettext("male", "Other"), pgettext("female", "Other"),
pgettext("male", "Other"), pgettext("female", "Other"),
symmetric=True)
| bsd-2-clause |
disqus/pgshovel | tests/pgshovel/replication/streams/kafka.py | 1 | 4446 | from __future__ import absolute_import
import pytest
from itertools import islice
from kafka import (
KafkaClient,
SimpleProducer,
)
from tests.pgshovel.fixtures import (
cluster,
create_temporary_database,
)
from tests.pgshovel.streams.fixtures import (
DEFAULT_PUBLISHER,
begin,
transaction,
transactions,
)
from pgshovel.interfaces.common_pb2 import Snapshot
from pgshovel.interfaces.configurations_pb2 import ReplicationSetConfiguration
from pgshovel.interfaces.replication_pb2 import (
ConsumerState,
State,
BootstrapState,
TransactionState,
)
from pgshovel.interfaces.streams_pb2 import (
Header,
Message,
)
from pgshovel.replication.streams.kafka import KafkaStream
from pgshovel.replication.validation.consumers import SequencingError
from pgshovel.replication.validation.transactions import InvalidEventError
from pgshovel.relay.streams.kafka import KafkaWriter
from pgshovel.streams.utilities import UnableToPrimeError
@pytest.yield_fixture
def configuration():
yield {'hosts': 'kafka:9092'}
@pytest.yield_fixture
def stream(configuration, cluster, client):
stream = KafkaStream.configure(configuration, cluster, 'default')
client.ensure_topic_exists(stream.topic)
yield stream
@pytest.yield_fixture
def client(configuration):
yield KafkaClient(configuration['hosts'])
@pytest.yield_fixture
def writer(client, stream):
producer = SimpleProducer(client)
yield KafkaWriter(producer, stream.topic)
@pytest.yield_fixture
def state():
bootstrap_state = BootstrapState(
node='1234',
snapshot=Snapshot(min=1, max=2),
)
yield State(bootstrap_state=bootstrap_state)
@pytest.yield_fixture
def sliced_transaction():
two_transactions = list(islice(transactions(), 6))
head, remainder = two_transactions[0], two_transactions[1:]
assert head.batch_operation.begin_operation == begin
yield remainder
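# Illustrative (a sketch): sliced_transaction drops the leading begin
# message, so a consumer must discard messages until the next transaction
# start -- the behaviour exercised below in
# test_discards_messages_until_start_of_transaction.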
def test_starts_at_beginning_of_stream_for_bootstrapped_state(writer, stream, state):
writer.push(transaction)
consumed = list(islice(stream.consume(state), 3))
assert [message for _, _, message in consumed] == transaction
def test_yields_new_update_state_after_each_message(writer, stream, state):
expected_states = {
0: 'in_transaction',
1: 'in_transaction',
2: 'committed'
}
writer.push(transaction)
for state, offset, message in islice(stream.consume(state), 3):
assert state.stream_state.consumer_state.offset == offset
assert state.stream_state.consumer_state.header == message.header
assert state.stream_state.transaction_state.WhichOneof('state') == expected_states[offset]
def test_uses_existing_stream_state_if_it_exists(writer, stream, state):
writer.push(islice(transactions(), 6))
iterator = stream.consume(state)
next(iterator)
next(iterator)
(new_state, offset, message) = next(iterator)
new_iterator = stream.consume(new_state)
(_, new_offset, _) = next(new_iterator)
assert new_offset == 3
def test_crashes_on_no_state(stream):
with pytest.raises(AttributeError):
next(stream.consume(None))
def test_validates_stream_and_crashes_when_invalid(writer, stream, state):
messages = list(islice(transactions(), 3))
messages[1] = messages[0]
writer.push(messages)
with pytest.raises(SequencingError):
list(stream.consume(state))
def test_discards_messages_until_start_of_transaction(writer, stream, state, sliced_transaction):
writer.push(sliced_transaction)
consumed = list(islice(stream.consume(state), 3))
assert [message for _, _, message in consumed] == sliced_transaction[-3:]
def test_discarded_messages_is_configurable(configuration, cluster, client, state, writer, sliced_transaction):
writer.push(sliced_transaction)
configuration['prime_threshold'] = 1
antsy_stream = KafkaStream.configure(configuration, cluster, 'default')
less_antsy_config = configuration.copy()
less_antsy_config['prime_threshold'] = 3
less_antsy_stream = KafkaStream.configure(less_antsy_config, cluster, 'default')
client.ensure_topic_exists(antsy_stream.topic)
with pytest.raises(UnableToPrimeError):
list(islice(antsy_stream.consume(state), 3))
consumed = list(islice(less_antsy_stream.consume(state), 3))
assert [message for _, _, message in consumed] == sliced_transaction[-3:]
| apache-2.0 |
neilpelow/wmap-django | venv/lib/python3.5/site-packages/django/contrib/flatpages/forms.py | 108 | 2162 | from django import forms
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext, ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
url = forms.RegexField(
label=_("URL"),
max_length=100,
regex=r'^[-\w/\.~]+$',
help_text=_("Example: '/about/contact/'. Make sure to have leading and trailing slashes."),
error_messages={
"invalid": _(
"This value must contain only letters, numbers, dots, "
"underscores, dashes, slashes or tildes."
),
},
)
class Meta:
model = FlatPage
fields = '__all__'
def clean_url(self):
url = self.cleaned_data['url']
if not url.startswith('/'):
raise forms.ValidationError(
ugettext("URL is missing a leading slash."),
code='missing_leading_slash',
)
if (settings.APPEND_SLASH and (
(settings.MIDDLEWARE and 'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE) or
'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE_CLASSES) and
not url.endswith('/')):
raise forms.ValidationError(
ugettext("URL is missing a trailing slash."),
code='missing_trailing_slash',
)
return url
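    # Illustrative (a sketch): with APPEND_SLASH enabled and CommonMiddleware
    # installed, '/about' fails validation above while '/about/' is accepted.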
def clean(self):
url = self.cleaned_data.get('url')
sites = self.cleaned_data.get('sites')
same_url = FlatPage.objects.filter(url=url)
if self.instance.pk:
same_url = same_url.exclude(pk=self.instance.pk)
if sites and same_url.filter(sites__in=sites).exists():
for site in sites:
if same_url.filter(sites=site).exists():
raise forms.ValidationError(
_('Flatpage with url %(url)s already exists for site %(site)s'),
code='duplicate_url',
params={'url': url, 'site': site},
)
return super(FlatpageForm, self).clean()
| gpl-3.0 |
Inspq/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_cancel.py | 33 | 3250 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_cancel
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: Cancel an Ansible Tower Job.
description:
- Cancel Ansible Tower jobs. See
U(https://www.ansible.com/tower) for an overview.
options:
job_id:
description:
- ID of the job to cancel
required: True
fail_if_not_running:
description:
- Fail loudly if the job_id does not reference a running job.
default: False
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Cancel job
tower_job_cancel:
job_id: job.id
'''
RETURN = '''
id:
    description: ID of the job that cancellation was requested for
returned: success
type: int
sample: 94
status:
description: status of the cancel request
returned: success
type: string
sample: canceled
'''
from ansible.module_utils.basic import AnsibleModule
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import (
tower_auth_config,
tower_check_mode,
tower_argument_spec,
)
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
argument_spec = tower_argument_spec()
argument_spec.update(dict(
        job_id=dict(type='int', required=True),
        fail_if_not_running=dict(type='bool', default=False),
))
module = AnsibleModule(
        argument_spec=argument_spec,
supports_check_mode=True,
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
job_id = module.params.get('job_id')
json_output = {}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
job = tower_cli.get_resource('job')
params = module.params.copy()
try:
result = job.cancel(job_id, **params)
json_output['id'] = job_id
except (exc.ConnectionError, exc.BadRequest, exc.TowerCLIError) as excinfo:
module.fail_json(msg='Unable to cancel job_id/{0}: {1}'.format(job_id, excinfo), changed=False)
json_output['changed'] = result['changed']
json_output['status'] = result['status']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| gpl-3.0 |
jmacmahon/invenio | modules/bibindex/lib/bibindex_engine_stemmer_unit_tests.py | 3 | 4306 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the indexing engine."""
__revision__ = "$Id$"
from invenio.testutils import InvenioTestCase
from invenio import bibindex_engine_stemmer
from invenio.testutils import make_test_suite, run_test_suite
class TestStemmer(InvenioTestCase):
"""Test stemmer."""
def test_stemmer_none(self):
"""bibindex engine - no stemmer"""
self.assertEqual("information",
bibindex_engine_stemmer.stem("information", None))
def test_stemmer_english(self):
"""bibindex engine - English stemmer"""
english_test_cases = [['information', 'inform'],
['experiment', 'experi'],
['experiments', 'experi'],
['experimented', 'experi'],
['experimenting', 'experi'],
['experimental', 'experiment'],
['experimentally', 'experiment'],
['experimentation', 'experiment'],
['experimentalism', 'experiment'],
['experimenter', 'experiment'],
['experimentalise', 'experimentalis'],
['experimentalist', 'experimentalist'],
['experimentalists', 'experimentalist'],
['GeV', 'GeV'],
['$\Omega$', '$\Omega$'],
['e^-', 'e^-'],
['C#', 'C#'],
['C++', 'C++']]
for test_word, expected_result in english_test_cases:
self.assertEqual(expected_result,
bibindex_engine_stemmer.stem(test_word, "en"))
def test_stemmer_greek(self):
"""bibindex engine - Greek stemmer"""
greek_test_cases = [['πληροφορίες', 'ΠΛΗΡΟΦΟΡΙ'],
['πείραμα', 'ΠΕΙΡΑΜ'],
['πειράματα', 'ΠΕΙΡΑΜ'],
['πειραματιστής', 'ΠΕΙΡΑΜΑΤΙΣΤ'],
['πειραματίζομαι', 'ΠΕΙΡΑΜΑΤΙΖ'],
['πειραματίζεσαι', 'ΠΕΙΡΑΜΑΤΙΖ'],
['πειραματίστηκα', 'ΠΕΙΡΑΜΑΤΙΣΤ'],
['πειραματόζωο', 'ΠΕΙΡΑΜΑΤΟΖΩ'],
['ζώο', 'ΖΩ'],
['πειραματισμός', 'ΠΕΙΡΑΜΑΤΙΣΜ'],
['πειραματικός', 'ΠΕΙΡΑΜΑΤΙΚ'],
['πειραματικά', 'ΠΕΙΡΑΜΑΤ'],
['ηλεκτρόνιο', 'ΗΛΕΚΤΡΟΝΙ'],
['ηλεκτρονιακός', 'ΗΛΕΚΤΡΟΝΙΑΚ'],
['ακτίνα', 'ΑΚΤΙΝ'],
['ακτινοβολία', 'ΑΚΤΙΝΟΒΟΛ'],
['E=mc^2', 'E=MC^2'],
['α+β=γ', 'Α+Β=Γ']]
for test_word, expected_result in greek_test_cases:
self.assertEqual(expected_result,
bibindex_engine_stemmer.stem(test_word, "el"))
TEST_SUITE = make_test_suite(TestStemmer,)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
| gpl-2.0 |
kingmotley/SickRage | lib/backports/ssl_match_hostname/__init__.py | 134 | 3650 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
import re
__version__ = '3.4.0.2'
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
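# Illustrative wildcard handling (a sketch of the RFC 6125 rules above):
#   _dnsname_match('*.example.com', 'www.example.com')   # match
#   _dnsname_match('*.example.com', 'example.com')       # no match
#   _dnsname_match('x*.example.com', 'xn.example.com')   # match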
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
| gpl-3.0 |
mancoast/CPythonPyc_test | fail/300_test_urllib2net.py | 2 | 9015 | #!/usr/bin/env python
import unittest
from test import support
from test.test_urllib2 import sanepathname2url
import os
import socket
import sys
import urllib.error
import urllib.request
def _retry_thrice(func, exc, *args, **kwargs):
for i in range(3):
try:
return func(*args, **kwargs)
except exc as e:
last_exc = e
continue
except:
raise
raise last_exc
def _wrap_with_retry_thrice(func, exc):
def wrapped(*args, **kwargs):
return _retry_thrice(func, exc, *args, **kwargs)
return wrapped
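# Illustrative (a sketch): the wrapper retries flaky network calls, e.g.
#   urlopen = _wrap_with_retry_thrice(urllib.request.urlopen,
#                                     urllib.error.URLError)
#   urlopen("http://www.python.org/")  # retried up to 3 times on URLError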
# Connecting to remote hosts is flaky. Make it more robust by retrying
# the connection several times.
_urlopen_with_retry = _wrap_with_retry_thrice(urllib.request.urlopen,
urllib.error.URLError)
class AuthTests(unittest.TestCase):
"""Tests urllib2 authentication features."""
## Disabled at the moment since there is no page under python.org which
## could be used to HTTP authentication.
#
# def test_basic_auth(self):
# import http.client
#
# test_url = "http://www.python.org/test/test_urllib2/basic_auth"
# test_hostport = "www.python.org"
# test_realm = 'Test Realm'
# test_user = 'test.test_urllib2net'
# test_password = 'blah'
#
# # failure
# try:
# _urlopen_with_retry(test_url)
# except urllib2.HTTPError, exc:
# self.assertEqual(exc.code, 401)
# else:
# self.fail("urlopen() should have failed with 401")
#
# # success
# auth_handler = urllib2.HTTPBasicAuthHandler()
# auth_handler.add_password(test_realm, test_hostport,
# test_user, test_password)
# opener = urllib2.build_opener(auth_handler)
# f = opener.open('http://localhost/')
# response = _urlopen_with_retry("http://www.python.org/")
#
# # The 'userinfo' URL component is deprecated by RFC 3986 for security
# # reasons, let's not implement it! (it's already implemented for proxy
# # specification strings (that is, URLs or authorities specifying a
# # proxy), so we must keep that)
# self.assertRaises(http.client.InvalidURL,
# urllib2.urlopen, "http://evil:[email protected]")
class CloseSocketTest(unittest.TestCase):
def test_close(self):
import socket, http.client, gc
# calling .close() on urllib2's response objects should close the
# underlying socket
response = _urlopen_with_retry("http://www.python.org/")
sock = response.fp
self.assert_(not sock.closed)
response.close()
self.assert_(sock.closed)
class OtherNetworkTests(unittest.TestCase):
def setUp(self):
if 0: # for debugging
import logging
logger = logging.getLogger("test_urllib2net")
logger.addHandler(logging.StreamHandler())
# XXX The rest of these tests aren't very good -- they don't check much.
# They do sometimes catch some major disasters, though.
def test_ftp(self):
urls = [
'ftp://ftp.kernel.org/pub/linux/kernel/README',
'ftp://ftp.kernel.org/pub/linux/kernel/non-existant-file',
#'ftp://ftp.kernel.org/pub/leenox/kernel/test',
'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
'/research-reports/00README-Legal-Rules-Regs',
]
self._test_urls(urls, self._extra_handlers())
def test_file(self):
TESTFN = support.TESTFN
f = open(TESTFN, 'w')
try:
f.write('hi there\n')
f.close()
urls = [
'file:' + sanepathname2url(os.path.abspath(TESTFN)),
('file:///nonsensename/etc/passwd', None,
urllib.error.URLError),
]
self._test_urls(urls, self._extra_handlers(), retry=True)
finally:
os.remove(TESTFN)
# XXX Following test depends on machine configurations that are internal
# to CNRI. Need to set up a public server with the right authentication
# configuration for test purposes.
## def test_cnri(self):
## if socket.gethostname() == 'bitdiddle':
## localhost = 'bitdiddle.cnri.reston.va.us'
## elif socket.gethostname() == 'bitdiddle.concentric.net':
## localhost = 'localhost'
## else:
## localhost = None
## if localhost is not None:
## urls = [
## 'file://%s/etc/passwd' % localhost,
## 'http://%s/simple/' % localhost,
## 'http://%s/digest/' % localhost,
## 'http://%s/not/found.h' % localhost,
## ]
## bauth = HTTPBasicAuthHandler()
## bauth.add_password('basic_test_realm', localhost, 'jhylton',
## 'password')
## dauth = HTTPDigestAuthHandler()
## dauth.add_password('digest_test_realm', localhost, 'jhylton',
## 'password')
## self._test_urls(urls, self._extra_handlers()+[bauth, dauth])
def _test_urls(self, urls, handlers, retry=True):
import socket
import time
import logging
debug = logging.getLogger("test_urllib2").debug
urlopen = urllib.request.build_opener(*handlers).open
if retry:
urlopen = _wrap_with_retry_thrice(urlopen, urllib.error.URLError)
for url in urls:
if isinstance(url, tuple):
url, req, expected_err = url
else:
req = expected_err = None
debug(url)
try:
f = urlopen(url, req)
except EnvironmentError as err:
debug(err)
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
(expected_err, url, req, type(err), err))
self.assert_(isinstance(err, expected_err), msg)
else:
with support.transient_internet():
buf = f.read()
f.close()
debug("read %d bytes" % len(buf))
debug("******** next url coming up...")
time.sleep(0.1)
def _extra_handlers(self):
handlers = []
cfh = urllib.request.CacheFTPHandler()
cfh.setTimeout(1)
handlers.append(cfh)
return handlers
class TimeoutTest(unittest.TestCase):
def test_http_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
u = _urlopen_with_retry("http://www.python.org")
self.assertTrue(u.fp._sock.gettimeout() is None)
def test_http_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry("http://www.python.org")
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp._sock.gettimeout(), 60)
def test_http_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry("http://www.python.org", timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp._sock.gettimeout() is None)
def test_http_timeout(self):
u = _urlopen_with_retry("http://www.python.org", timeout=120)
self.assertEqual(u.fp._sock.gettimeout(), 120)
FTP_HOST = "ftp://ftp.mirror.nl/pub/mirror/gnu/"
def test_ftp_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
u = _urlopen_with_retry(self.FTP_HOST)
self.assertTrue(u.fp.fp.raw._sock.gettimeout() is None)
def test_ftp_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
def test_ftp_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp.fp.raw._sock.gettimeout() is None)
def test_ftp_timeout(self):
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
def test_main():
support.requires("network")
support.run_unittest(AuthTests,
OtherNetworkTests,
CloseSocketTest,
TimeoutTest,
)
if __name__ == "__main__":
test_main()
| gpl-3.0 |
kalahbrown/HueBigSQL | desktop/core/ext-py/Django-1.6.10/tests/requests/tests.py | 47 | 31824 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import time
import warnings
from datetime import datetime, timedelta
from io import BytesIO
from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
from django.test import SimpleTestCase, TransactionTestCase
from django.test.client import FakePayload
from django.test.utils import override_settings, str_prefix
from django.utils import six
from django.utils.unittest import skipIf
from django.utils.http import cookie_date, urlencode
from django.utils.six.moves.urllib.parse import urlencode as original_urlencode
from django.utils.timezone import utc
class RequestsTests(SimpleTestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(list(request.GET.keys()), [])
self.assertEqual(list(request.POST.keys()), [])
self.assertEqual(list(request.COOKIES.keys()), [])
self.assertEqual(list(request.META.keys()), [])
def test_httprequest_repr(self):
request = HttpRequest()
request.path = '/somepath/'
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
self.assertEqual(build_request_repr(request), repr(request))
self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
def test_wsgirequest(self):
request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': BytesIO(b'')})
self.assertEqual(list(request.GET.keys()), [])
self.assertEqual(list(request.POST.keys()), [])
self.assertEqual(list(request.COOKIES.keys()), [])
self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
def test_wsgirequest_with_script_name(self):
"""
Ensure that the request's path is correctly assembled, regardless of
whether or not the SCRIPT_NAME has a trailing slash.
Refs #20169.
"""
# With trailing slash
request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/PREFIX/somepath/')
# Without trailing slash
request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/PREFIX/somepath/')
def test_wsgirequest_with_force_script_name(self):
"""
Ensure that the FORCE_SCRIPT_NAME setting takes precedence over the
request's SCRIPT_NAME environment parameter.
Refs #20169.
"""
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
"""
Ensure that the request's path is correctly assembled, regardless of
whether or not the FORCE_SCRIPT_NAME setting has a trailing slash.
Refs #20169.
"""
# With trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
# Without trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_repr(self):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
self.assertEqual(build_request_repr(request), repr(request))
self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
def test_wsgirequest_path_info(self):
def wsgi_str(path_info):
path_info = path_info.encode('utf-8') # Actual URL sent by the browser (bytestring)
if six.PY3:
path_info = path_info.decode('iso-8859-1') # Value in the WSGI environ dict (native string)
return path_info
# Regression for #19468
request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, "/سلام/")
def test_parse_cookie(self):
self.assertEqual(parse_cookie('invalid@key=true'), {})
def test_httprequest_location(self):
request = HttpRequest()
self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
'https://www.example.com/asdf')
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
'http://www.example.com/path/with:colons')
@override_settings(
USE_X_FORWARDED_HOST=False,
ALLOWED_HOSTS=[
'forward.com', 'example.com', 'internal.com', '12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
'.multitenant.com', 'INSENSITIVE.com',
])
def test_http_get_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is ignored.
self.assertEqual(request.get_host(), 'example.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
# Poisoned host headers are rejected as suspicious
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
'anything.multitenant.com',
'multitenant.com',
'insensitive.com',
]
poisoned_hosts = [
'[email protected]',
'example.com:[email protected]',
'example.com:[email protected]:80',
'example.com:80/badpath',
'example.com: recovermypassword.com',
'other.com', # not in ALLOWED_HOSTS
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
for host in poisoned_hosts:
with self.assertRaises(SuspiciousOperation):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
def test_http_get_host_with_x_forwarded_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is obeyed.
self.assertEqual(request.get_host(), 'forward.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
# Poisoned host headers are rejected as suspicious
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
]
poisoned_hosts = [
'[email protected]',
'example.com:[email protected]',
'example.com:[email protected]:80',
'example.com:80/badpath',
'example.com: recovermypassword.com',
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
for host in poisoned_hosts:
with self.assertRaises(SuspiciousOperation):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
@override_settings(DEBUG=True, ALLOWED_HOSTS=[])
def test_host_validation_disabled_in_debug_mode(self):
"""If ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass."""
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
}
self.assertEqual(request.get_host(), 'example.com')
@override_settings(ALLOWED_HOSTS=[])
def test_get_host_suggestion_of_allowed_host(self):
"""get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
msg_invalid_host = "Invalid HTTP_HOST header: %r."
msg_suggestion = msg_invalid_host + "You may need to add %r to ALLOWED_HOSTS."
for host in [ # Valid-looking hosts
'example.com',
'12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]',
            'xn--4ca9at.com',  # Punycode for öäü.com
]:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
self.assertRaisesMessage(
SuspiciousOperation,
msg_suggestion % (host, host),
request.get_host
)
for domain, port in [ # Valid-looking hosts with a port number
('example.com', 80),
('12.34.56.78', 443),
('[2001:19f0:feee::dead:beef:cafe]', 8080),
]:
host = '%s:%s' % (domain, port)
request = HttpRequest()
request.META = {'HTTP_HOST': host}
self.assertRaisesMessage(
SuspiciousOperation,
msg_suggestion % (host, domain),
request.get_host
)
for host in [ # Invalid hosts
'[email protected]',
'example.com:[email protected]',
'example.com:[email protected]:80',
'example.com:80/badpath',
'example.com: recovermypassword.com',
]:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
self.assertRaisesMessage(
SuspiciousOperation,
msg_invalid_host % host,
request.get_host
)
def test_near_expiration(self):
"Cookie will expire when an near expiration time is provided"
response = HttpResponse()
        # There is a timing weakness in this test; the
# expected result for max-age requires that there be
# a very slight difference between the evaluated expiration
# time, and the time evaluated in set_cookie(). If this
# difference doesn't exist, the cookie time will be
# 1 second larger. To avoid the problem, put in a quick sleep,
# which guarantees that there will be a time difference.
expires = datetime.utcnow() + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_aware_expiration(self):
"Cookie accepts an aware datetime as expiration time"
response = HttpResponse()
expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_far_expiration(self):
"Cookie will expire when an distant expiration time is provided"
response = HttpResponse()
response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')
def test_max_age_expiration(self):
"Cookie will expire if max_age is provided"
response = HttpResponse()
response.set_cookie('max_age', max_age=10)
max_age_cookie = response.cookies['max_age']
self.assertEqual(max_age_cookie['max-age'], 10)
self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
def test_httponly_cookie(self):
response = HttpResponse()
response.set_cookie('example', httponly=True)
example_cookie = response.cookies['example']
# A compat cookie may be in use -- check that it has worked
# both as an output string, and using the cookie attributes
self.assertTrue('; httponly' in str(example_cookie))
self.assertTrue(example_cookie['httponly'])
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(), b'te')
# Reading again returns nothing.
self.assertEqual(stream.read(), b'')
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(5), b'te')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read sequentially from a stream
stream = LimitedStream(BytesIO(b'12345678'), 8)
self.assertEqual(stream.read(5), b'12345')
self.assertEqual(stream.read(5), b'678')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read lines from a stream
stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), b'1234\n')
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), b'56')
# Read the rest of the partial line
self.assertEqual(stream.readline(), b'78\n')
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), b'abcd\n')
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), b'efgh')
# Read the next line... just the line end
self.assertEqual(stream.readline(), b'\n')
# Read everything else.
self.assertEqual(stream.readline(), b'ijkl')
# Regression for #15018
# If a stream contains a newline, but the provided length
# is less than the number of provided characters, the newline
# doesn't reset the available character count
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.readline(10), b'1234\n')
self.assertEqual(stream.readline(3), b'abc')
# Now expire the available characters
self.assertEqual(stream.readline(3), b'd')
# Reading again returns nothing.
self.assertEqual(stream.readline(2), b'')
# Same test, but with read, not readline.
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.read(6), b'1234\na')
self.assertEqual(stream.read(2), b'bc')
self.assertEqual(stream.read(2), b'd')
self.assertEqual(stream.read(2), b'')
self.assertEqual(stream.read(), b'')
def test_stream(self):
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.read(), b'name=value')
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or body.
"""
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.POST, {'name': ['value']})
self.assertEqual(request.body, b'name=value')
self.assertEqual(request.read(), b'name=value')
def test_value_after_read(self):
"""
Construction of POST or body is not allowed after reading
from request.
"""
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.read(2), b'na')
self.assertRaises(Exception, lambda: request.body)
self.assertEqual(request.POST, {})
def test_non_ascii_POST(self):
payload = FakePayload(urlencode({'key': 'España'}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_alternate_charset_POST(self):
"""
Test a POST with non-utf-8 payload encoding.
"""
payload = FakePayload(original_urlencode({'key': 'España'.encode('latin-1')}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_body_after_POST_multipart_form_data(self):
"""
Reading body after parsing multipart/form-data is not allowed
"""
        # Because multipart is used for large amounts of data, i.e. file uploads,
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.POST, {'name': ['value']})
self.assertRaises(Exception, lambda: request.body)
def test_body_after_POST_multipart_related(self):
"""
Reading body after parsing multipart that isn't form-data is allowed
"""
# Ticket #9054
# There are cases in which the multipart data is related instead of
# being a binary upload, in which case it should still be accessible
# via body.
payload_data = b"\r\n".join([
b'--boundary',
b'Content-ID: id; name="name"',
b'',
b'value',
b'--boundary--'
b''])
payload = FakePayload(payload_data)
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/related; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.POST, {})
self.assertEqual(request.body, payload_data)
def test_POST_multipart_with_content_length_zero(self):
"""
Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
"""
# According to:
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# Every request.POST with Content-Length >= 0 is a valid request,
# this test ensures that we handle Content-Length == 0.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': 0,
'wsgi.input': payload})
self.assertEqual(request.POST, {})
def test_POST_binary_only(self):
payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
environ = {'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/octet-stream',
'CONTENT_LENGTH': len(payload),
'wsgi.input': BytesIO(payload)}
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
# Same test without specifying content-type
environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
def test_read_by_lines(self):
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(list(request), [b'name=value'])
def test_POST_after_body_read(self):
"""
POST should be populated even if body is read first
"""
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
raw_data = request.body
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read(self):
"""
POST should be populated even if body is read first, and then
the stream is read second.
"""
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
raw_data = request.body
self.assertEqual(request.read(1), b'n')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read_multipart(self):
"""
POST should be populated even if body is read first, and then
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
raw_data = request.body
# Consume enough data to mess up the parsing:
self.assertEqual(request.read(13), b'--boundary\r\nC')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload)})
with self.assertRaises(UnreadablePostError):
request.body
def test_FILES_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
FILES, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'x'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload)})
with self.assertRaises(UnreadablePostError):
request.FILES
@skipIf(connection.vendor == 'sqlite'
and connection.settings_dict['TEST_NAME'] in (None, '', ':memory:'),
"Cannot establish two connections to an in-memory SQLite database.")
class DatabaseConnectionHandlingTests(TransactionTestCase):
available_apps = []
def setUp(self):
# Use a temporary connection to avoid messing with the main one.
self._old_default_connection = connections['default']
del connections['default']
def tearDown(self):
try:
connections['default'].close()
finally:
connections['default'] = self._old_default_connection
def test_request_finished_db_state(self):
# Force closing connection on request end
connection.settings_dict['CONN_MAX_AGE'] = 0
# The GET below will not succeed, but it will give a response with
# defined ._handler_class. That is needed for sending the
# request_finished signal.
response = self.client.get('/')
# Make sure there is an open connection
connection.cursor()
connection.enter_transaction_management()
signals.request_finished.send(sender=response._handler_class)
self.assertEqual(len(connection.transaction_state), 0)
def test_request_finished_failed_connection(self):
# Force closing connection on request end
connection.settings_dict['CONN_MAX_AGE'] = 0
connection.enter_transaction_management()
connection.set_dirty()
# Test that the rollback doesn't succeed (for example network failure
# could cause this).
def fail_horribly():
raise Exception("Horrible failure!")
connection._rollback = fail_horribly
try:
with self.assertRaises(Exception):
signals.request_finished.send(sender=self.__class__)
# The connection's state wasn't cleaned up
self.assertEqual(len(connection.transaction_state), 1)
finally:
del connection._rollback
# The connection will be cleaned on next request where the conn
# works again.
signals.request_finished.send(sender=self.__class__)
self.assertEqual(len(connection.transaction_state), 0)
| apache-2.0 |
jjas0nn/solvem | tensorflow/lib/python2.7/site-packages/tensorflow/python/training/slot_creator.py | 14 | 4942 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Standard functions for creating slots.
A slot is a `Variable` created with the same shape as a primary variable or
`Tensor`. A slot is always scoped in the namespace of the primary object and
typically has the same device and type.
Slots are typically used as accumulators to track values associated with
the primary object:
```python
# Optimizers can create a slot for each variable to track accumulators
accumulators = {var : create_zeros_slot(var, "momentum") for var in vs}
for var in vs:
apply_momentum(var, accumulators[var], lr, grad, momentum_tensor)
# Slots can also be used for moving averages
mavg = create_slot(var, var.initialized_value(), "exponential_moving_avg")
update_mavg = mavg.assign_sub((mavg - var) * (1 - decay))
```
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops import variable_scope
def _create_slot_var(primary, val, scope):
"""Helper function for creating a slot variable."""
# TODO(lukaszkaiser): Consider allowing partitioners to be set in the current
# scope.
current_partitioner = variable_scope.get_variable_scope().partitioner
variable_scope.get_variable_scope().set_partitioner(None)
slot = variable_scope.get_variable(scope, initializer=val, trainable=False)
variable_scope.get_variable_scope().set_partitioner(current_partitioner)
# pylint: disable=protected-access
if isinstance(primary, variables.Variable) and primary._save_slice_info:
# Primary is a partitioned variable, so we need to also indicate that
# the slot is a partitioned variable. Slots have the same partitioning
# as their primaries.
    # For example, when using AdamOptimizer in a linear model, slot.name
    # here can be "linear//weights/Adam:0", while primary.op.name is
    # "linear//weights". We want to get 'Adam' as real_slot_name, so we
    # remove "'linear//weights' + '/'" and ':0'.
real_slot_name = slot.name[len(primary.op.name + "/"):-2]
slice_info = primary._save_slice_info
slot._set_save_slice_info(variables.Variable.SaveSliceInfo(
slice_info.full_name + "/" + real_slot_name,
slice_info.full_shape[:],
slice_info.var_offset[:],
slice_info.var_shape[:]))
# pylint: enable=protected-access
return slot
def create_slot(primary, val, name, colocate_with_primary=True):
"""Create a slot initialized to the given value.
The type of the slot is determined by the given value.
Args:
primary: The primary `Variable` or `Tensor`.
val: A `Tensor` specifying the initial value of the slot.
name: Name to use for the slot variable.
colocate_with_primary: Boolean. If True the slot is located
on the same device as `primary`.
Returns:
A `Variable` object.
"""
# Scope the slot name in the namespace of the primary variable.
# Set "primary.op.name + '/' + name" as default name, so the scope name of
# optimizer can be shared when reuse is True. Meanwhile when reuse is False
# and the same name has been previously used, the scope name will add '_N'
# as suffix for unique identifications.
with variable_scope.variable_scope(None, primary.op.name + '/' + name):
if colocate_with_primary:
with ops.colocate_with(primary):
return _create_slot_var(primary, val, '')
else:
return _create_slot_var(primary, val, '')
def create_zeros_slot(primary, name, dtype=None, colocate_with_primary=True):
"""Create a slot initialized to 0 with same shape as the primary object.
Args:
primary: The primary `Variable` or `Tensor`.
name: Name to use for the slot variable.
dtype: Type of the slot variable. Defaults to the type of `primary`.
colocate_with_primary: Boolean. If True the slot is located
on the same device as `primary`.
Returns:
A `Variable` object.
"""
if dtype is None:
dtype = primary.dtype
val = array_ops.zeros(primary.get_shape().as_list(), dtype=dtype)
return create_slot(primary, val, name,
colocate_with_primary=colocate_with_primary)
| mit |
luofei98/qgis | python/plugins/processing/algs/saga/RasterCalculator.py | 1 | 3148 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RasterCalculator.py
---------------------
Date : May 2014
Copyright : (C) 2014 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from processing.core.parameters import ParameterMultipleInput
from processing.algs.saga.SagaAlgorithm import SagaAlgorithm
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterString
from processing.algs.saga.SagaGroupNameDecorator import SagaGroupNameDecorator
__author__ = 'Victor Olaya'
__date__ = 'May 2014'
__copyright__ = '(C) 2014, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4 import QtGui
from processing.core.parameters import ParameterRaster
from processing.core.outputs import OutputRaster
from processing.tools.system import *
class RasterCalculator(SagaAlgorithm):
FORMULA = "FORMULA"
GRIDS = 'GRIDS'
XGRIDS = 'XGRIDS'
RESULT = "RESULT"
def __init__(self):
self.allowUnmatchingGridExtents = True
self.hardcodedStrings = []
GeoAlgorithm.__init__(self)
def getCopy(self):
newone = RasterCalculator()
newone.provider = self.provider
return newone
def defineCharacteristics(self):
self.name = 'Raster calculator'
self.cmdname = 'Grid Calculator'
self.undecoratedGroup = "grid_calculus"
self.group = SagaGroupNameDecorator.getDecoratedName(self.undecoratedGroup)
        self.addParameter(ParameterRaster(self.GRIDS, 'Main input layer'))
self.addParameter(ParameterMultipleInput(self.XGRIDS, 'Additional layers',
ParameterMultipleInput.TYPE_RASTER, True))
self.addParameter(ParameterString(self.FORMULA, "Formula"))
self.addOutput(OutputRaster(self.RESULT, "Result"))
#===========================================================================
# def processAlgorithm(self, progress):
# xgrids = self.getParameterValue(self.XGRIDS)
# layers = xgrids.split(';')
# grid = layers[0]
# self.setParameterValue(self.GRIDS, grid)
# xgrids = ";".join(layers[1:])
# if xgrids == "": xgrids = None
# self.setParameterValue(self.XGRIDS, xgrids)
# SagaAlgorithm.processAlgorithm(self, progress)
#===========================================================================
| gpl-2.0 |
luofei98/qgis | python/ext-libs/jinja2/_compat.py | 638 | 4042 | # -*- coding: utf-8 -*-
"""
jinja2._compat
~~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: Copyright 2013 by the Jinja team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, 'pypy_translation_info')
_identity = lambda x: x
if not PY2:
unichr = chr
range_type = range
text_type = str
string_types = (str,)
iterkeys = lambda d: iter(d.keys())
itervalues = lambda d: iter(d.values())
iteritems = lambda d: iter(d.items())
import pickle
from io import BytesIO, StringIO
NativeStringIO = StringIO
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
ifilter = filter
imap = map
izip = zip
intern = sys.intern
implements_iterator = _identity
implements_to_string = _identity
encode_filename = _identity
get_next = lambda x: x.__next__
else:
unichr = unichr
text_type = unicode
range_type = xrange
string_types = (str, unicode)
iterkeys = lambda d: d.iterkeys()
itervalues = lambda d: d.itervalues()
iteritems = lambda d: d.iteritems()
import cPickle as pickle
from cStringIO import StringIO as BytesIO, StringIO
NativeStringIO = BytesIO
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
from itertools import imap, izip, ifilter
intern = intern
def implements_iterator(cls):
cls.next = cls.__next__
del cls.__next__
return cls
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
return cls
get_next = lambda x: x.next
def encode_filename(filename):
if isinstance(filename, unicode):
return filename.encode('utf-8')
return filename
try:
next = next
except NameError:
def next(it):
return it.next()
def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instanciation that replaces
# itself with the actual metaclass. Because of internal type checks
# we also need to make sure that we downgrade the custom metaclass
# for one level to something closer to type (that's why __call__ and
# __init__ comes back from type etc.).
#
# This has the advantage over six.with_metaclass in that it does not
# introduce dummy classes into the final MRO.
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
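# Illustrative usage of with_metaclass (a sketch, not part of the original
# module; `Meta` and `Base` are placeholder names):
#
#   class Meta(type):
#       pass
#
#   class Base(object):
#       pass
#
#   class MyClass(with_metaclass(Meta, Base)):
#       pass
#
#   # type(MyClass) is Meta, and the 'temporary_class' helper never shows up
#   # in MyClass.__mro__ -- only Base and object do.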
try:
from collections import Mapping as mapping_types
except ImportError:
import UserDict
mapping_types = (UserDict.UserDict, UserDict.DictMixin, dict)
# common types. These do exist in the special types module too which however
# does not exist in IronPython out of the box. Also that way we don't have
# to deal with implementation specific stuff here
class _C(object):
def method(self): pass
def _func():
yield None
function_type = type(_func)
generator_type = type(_func())
method_type = type(_C().method)
code_type = type(_C.method.__code__)
try:
raise TypeError()
except TypeError:
_tb = sys.exc_info()[2]
traceback_type = type(_tb)
frame_type = type(_tb.tb_frame)
try:
from urllib.parse import quote_from_bytes as url_quote
except ImportError:
from urllib import quote as url_quote
try:
from thread import allocate_lock
except ImportError:
try:
from threading import Lock as allocate_lock
except ImportError:
from dummy_thread import allocate_lock
| gpl-2.0 |
bbaltz505/iotkit-libpy | iotkitclient/__init__.py | 1 | 2339 | # Copyright (c) 2015, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from connect import Connect
from device import Device
from account import Account
from user import User
from data import Data
from rule import Rule
from component import Component
from utils import prettyprint, update_properties
import json
import os
import sys
# Load the configuration file and store its values as module attributes
def load_config(infile):
''' Load global settings for Cloud connection
- server name
- username
- password
- proxy server(s)
- API root location on REST server
'''
if os.path.isfile(infile):
obj = sys.modules[__name__]
js = open(infile)
data = json.load(js)
update_properties(obj, data)
js.close()
return data
else:
raise ValueError("Config file not found: %s" % infile)
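# Illustrative usage (a sketch; the file name and JSON keys are assumptions
# based on the docstring above, not part of the library):
#
#   import iotkitclient
#   iotkitclient.load_config("config.json")
#   # afterwards the JSON keys are exposed as module attributes via
#   # update_properties(), e.g. iotkitclient.username / iotkitclient.password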
| bsd-3-clause |
Tudorvr/metagoofil | hachoir_metadata/filter.py | 85 | 1664 | from hachoir_metadata.timezone import UTC
from datetime import date, datetime
# Year in 1850..2030
MIN_YEAR = 1850
MAX_YEAR = 2030
class Filter:
def __init__(self, valid_types, min=None, max=None):
self.types = valid_types
self.min = min
self.max = max
def __call__(self, value):
if not isinstance(value, self.types):
return True
if self.min is not None and value < self.min:
return False
if self.max is not None and self.max < value:
return False
return True
class NumberFilter(Filter):
def __init__(self, min=None, max=None):
Filter.__init__(self, (int, long, float), min, max)
class DatetimeFilter(Filter):
def __init__(self, min=None, max=None):
Filter.__init__(self, (date, datetime),
datetime(MIN_YEAR, 1, 1),
datetime(MAX_YEAR, 12, 31))
self.min_date = date(MIN_YEAR, 1, 1)
self.max_date = date(MAX_YEAR, 12, 31)
self.min_tz = datetime(MIN_YEAR, 1, 1, tzinfo=UTC)
self.max_tz = datetime(MAX_YEAR, 12, 31, tzinfo=UTC)
def __call__(self, value):
"""
Use different min/max values depending on value type
(datetime with timezone, datetime or date).
"""
if not isinstance(value, self.types):
return True
if hasattr(value, "tzinfo") and value.tzinfo:
return (self.min_tz <= value <= self.max_tz)
elif isinstance(value, datetime):
return (self.min <= value <= self.max)
else:
return (self.min_date <= value <= self.max_date)
DATETIME_FILTER = DatetimeFilter()
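# Illustrative usage (a sketch, not part of the original module):
if __name__ == "__main__":
    # Dates inside 1850..2030 pass the filter; earlier ones are rejected.
    print DATETIME_FILTER(date(2000, 1, 1)) # True
    print DATETIME_FILTER(date(1640, 1, 1)) # False (before MIN_YEAR)
    print NumberFilter(min=0, max=10)(5) # True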
| gpl-2.0 |
Xiangua/symphony | vendor/doctrine/orm/docs/en/conf.py | 2448 | 6497 | # -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
| mit |
landscapeio/astroid | exceptions.py | 87 | 1814 | # copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""this module contains exceptions used in the astroid library
"""
__doctype__ = "restructuredtext en"
class AstroidError(Exception):
"""base exception class for all astroid related exceptions"""
class AstroidBuildingException(AstroidError):
"""exception class when we are unable to build an astroid representation"""
class ResolveError(AstroidError):
"""base class of astroid resolution/inference error"""
class NotFoundError(ResolveError):
"""raised when we are unable to resolve a name"""
class InferenceError(ResolveError):
"""raised when we are unable to infer a node"""
class UseInferenceDefault(Exception):
"""exception to be raised in custom inference function to indicate that it
should go back to the default behaviour
"""
class UnresolvableName(InferenceError):
"""raised when we are unable to resolve a name"""
class NoDefault(AstroidError):
"""raised by function's `default_value` method when an argument has
no default value
"""
| gpl-2.0 |
rukku/inasafe | realtime/sftp_client.py | 5 | 4885 | """
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **SFTP Client for retrieving remote data.**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__version__ = '0.5.0'
__date__ = '10/01/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import sys
import paramiko
import ntpath
from stat import S_ISDIR
from errno import ENOENT
import os
import logging
from utils import mkDir
# The logger is intialised in utils.py by init
LOGGER = logging.getLogger('InaSAFE')
my_host = '118.97.83.243'
my_username = 'geospasial'
try:
my_password = os.environ['QUAKE_SERVER_PASSWORD']
except KeyError:
LOGGER.exception('QUAKE_SERVER_PASSWORD not set!')
sys.exit()
my_remote_path = 'shakemaps'
class SFtpClient:
"""A utility class that contains methods to fetch a listings and files
from an SSH protocol"""
def __init__(self, the_host=my_host, the_username=my_username,
the_password=my_password, the_working_dir=my_remote_path):
self.host = the_host
self.username = the_username
self.password = the_password
self.working_dir = the_working_dir
# create transport object
self.transport = paramiko.Transport(self.host)
self.transport.connect(username=self.username, password=self.password)
# create sftp object
self.sftp = paramiko.SFTPClient.from_transport(self.transport)
# go to remote_path folder, this is the default folder
if not self.working_dir is None:
self.sftp.chdir(self.working_dir)
self.workdir_path = self.sftp.getcwd()
def download_path(self, remote_path, local_path):
""" Download remote_dir to local_dir.
for example : remote_path = '20130111133900' will be download to
local_dir/remote_path
Must be in the parent directory of remote dir.
"""
        # Check if remote_path exists
        if not self.is_path_exist(remote_path):
            print 'remote path does not exist %s' % remote_path
return False
if self.is_dir(remote_path):
# get directory name
dir_name = get_path_tail(remote_path)
# create directory in local machine
local_dir_path = os.path.join(local_path, dir_name)
mkDir(local_dir_path)
            # list all entries in the remote path
            list_dir = self.sftp.listdir(remote_path)
            # recurse into each entry
for my_dir in list_dir:
new_remote_path = os.path.join(remote_path, my_dir)
self.download_path(new_remote_path, local_dir_path)
else:
# download file to local_path
file_name = get_path_tail(remote_path)
local_file_path = os.path.join(local_path, file_name)
LOGGER.info('file %s will be downloaded to %s' % (remote_path,
local_file_path))
self.sftp.get(remote_path, local_file_path)
def is_dir(self, path):
"""Check if a path is a directory or not in sftp
Reference: http://stackoverflow.com/a/8307575/1198772
"""
try:
return S_ISDIR(self.sftp.stat(path).st_mode)
except IOError:
            # Path does not exist, so by definition not a directory
return False
def is_path_exist(self, path):
"""os.path.exists for paramiko's SCP object
Reference: http://stackoverflow.com/q/850749/1198772
"""
try:
self.sftp.stat(path)
except IOError, e:
if e.errno == ENOENT:
return False
raise
else:
return True
def getListing(self, remote_dir=None, my_func=None):
"""Return list of files and directories name under a remote_dir
and return true when it is input to my_func
"""
if remote_dir is None:
remote_dir = self.workdir_path
if self.is_path_exist(remote_dir):
temp_list = self.sftp.listdir(remote_dir)
else:
            LOGGER.debug('Directory %s does not exist, returning None' % remote_dir)
return None
retval = []
for my_temp in temp_list:
if my_func(my_temp):
retval.append(my_temp)
return retval
def get_path_tail(path):
'''Return tail of a path
Reference : http://stackoverflow.com/a/8384788/1198772
'''
head, tail = ntpath.split(path)
return tail or ntpath.basename(head)
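# Illustrative usage (a sketch, not part of the original module; the event id
# below is an assumption). Paths are relative to the 'shakemaps' working dir
# set in __init__:
#
#   client = SFtpClient() # connects with the module-level credentials
#   client.download_path('20130111133900', '/tmp')
#   xml_only = client.getListing(my_func=lambda name: name.endswith('.xml'))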
| gpl-3.0 |
Alexander-M-Waldman/local_currency_site | lib/python2.7/site-packages/guardian/testapp/tests/test_custompkmodel.py | 1 | 1179 | from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from guardian.compat import get_user_model
from guardian.shortcuts import assign_perm, remove_perm
class CustomPKModelTest(TestCase):
"""
Tests agains custom model with primary key other than *standard*
``id`` integer field.
"""
def setUp(self):
self.user = get_user_model().objects.create(username='joe')
self.ctype = ContentType.objects.create(model='bar', app_label='fake-for-guardian-tests')
def test_assign_perm(self):
assign_perm('contenttypes.change_contenttype', self.user, self.ctype)
self.assertTrue(self.user.has_perm('contenttypes.change_contenttype',
self.ctype))
def test_remove_perm(self):
assign_perm('contenttypes.change_contenttype', self.user, self.ctype)
self.assertTrue(self.user.has_perm('contenttypes.change_contenttype',
self.ctype))
remove_perm('contenttypes.change_contenttype', self.user, self.ctype)
self.assertFalse(self.user.has_perm('contenttypes.change_contenttype',
self.ctype))
| gpl-3.0 |
glogiotatidis/mozillians-new | vendor-local/lib/python/dateutil/zoneinfo/__init__.py | 265 | 2575 | """
Copyright (c) 2003-2005 Gustavo Niemeyer <[email protected]>
This module offers extensions to the standard python 2.3+
datetime module.
"""
from dateutil.tz import tzfile
from tarfile import TarFile
import os
__author__ = "Gustavo Niemeyer <[email protected]>"
__license__ = "PSF License"
__all__ = ["setcachesize", "gettz", "rebuild"]
CACHE = []
CACHESIZE = 10
class tzfile(tzfile):
def __reduce__(self):
return (gettz, (self._filename,))
def getzoneinfofile():
filenames = os.listdir(os.path.join(os.path.dirname(__file__)))
filenames.sort()
filenames.reverse()
for entry in filenames:
if entry.startswith("zoneinfo") and ".tar." in entry:
return os.path.join(os.path.dirname(__file__), entry)
return None
ZONEINFOFILE = getzoneinfofile()
del getzoneinfofile
def setcachesize(size):
global CACHESIZE, CACHE
CACHESIZE = size
del CACHE[size:]
def gettz(name):
tzinfo = None
if ZONEINFOFILE:
for cachedname, tzinfo in CACHE:
if cachedname == name:
break
else:
tf = TarFile.open(ZONEINFOFILE)
try:
zonefile = tf.extractfile(name)
except KeyError:
tzinfo = None
else:
tzinfo = tzfile(zonefile)
tf.close()
CACHE.insert(0, (name, tzinfo))
del CACHE[CACHESIZE:]
return tzinfo
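# Illustrative usage of gettz (a sketch, not part of the original module;
# availability of a given zone name depends on the bundled tarball):
#
#   tz = gettz("America/New_York")
#   # repeat lookups of the same name are served from the in-module CACHE:
#   # new entries are inserted at the front and the list is trimmed to
#   # CACHESIZE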
def rebuild(filename, tag=None, format="gz"):
import tempfile, shutil
tmpdir = tempfile.mkdtemp()
zonedir = os.path.join(tmpdir, "zoneinfo")
moduledir = os.path.dirname(__file__)
if tag: tag = "-"+tag
targetname = "zoneinfo%s.tar.%s" % (tag, format)
try:
tf = TarFile.open(filename)
for name in tf.getnames():
if not (name.endswith(".sh") or
name.endswith(".tab") or
name == "leapseconds"):
tf.extract(name, tmpdir)
filepath = os.path.join(tmpdir, name)
os.system("zic -d %s %s" % (zonedir, filepath))
tf.close()
target = os.path.join(moduledir, targetname)
for entry in os.listdir(moduledir):
if entry.startswith("zoneinfo") and ".tar." in entry:
os.unlink(os.path.join(moduledir, entry))
tf = TarFile.open(target, "w:%s" % format)
for entry in os.listdir(zonedir):
entrypath = os.path.join(zonedir, entry)
tf.add(entrypath, entry)
tf.close()
finally:
shutil.rmtree(tmpdir)
| bsd-3-clause |
Pavel-Durov/pynetwork | pynetwork/wkhtmltoimage.py | 1 | 2154 | """ Python wrapper for wkhtmltopdf library, for rendering images from html files.
[ doc: https://wkhtmltopdf.org/docs.html ]
* Currently supporting only Linux distributions!
This script designed to be run on devices without display.
Uses xvfb (X virtual framebuffer) for that functionality.
[ doc: https://www.x.org/archive/X11R7.6/doc/man/man1/Xvfb.1.xhtml ].
Dependencies : [xvfb, wkhtmltopdf]
Dependencies install:
apt-get install wkhtmltopdf
apt-get install xvfb
"""
from subprocess import call
import logging
import argparse
import shutil
from config import Config
__version__ = '1.0.0'
__description__ = "Python wrapper for wkhtmltopdf library - renders html to image (.jpeg) file"
XVFB_RUN = "xvfb-run"
WKHTML_TO_IMAGE = "wkhtmltoimage"
XVFB_CMD = XVFB_RUN + " " + WKHTML_TO_IMAGE + " --window-status ready_to_print --crop-h 396 {0} {1}"
def dependencies_installed():
"""
    Checks whether the xvfb-run and wkhtmltoimage packages are installed on the local machine.
"""
result = False
if Config.linux_host():
xvfb = shutil.which(XVFB_RUN) is not None
wkhtml = shutil.which(WKHTML_TO_IMAGE) is not None
result = xvfb and wkhtml
return result
def convert_html_to_image(html_path, image_out_path):
"""Converts html file to image and stores to disk
Returns:
whether the operations succeeded
"""
result = False
if dependencies_installed():
try:
cmd = XVFB_CMD.format(html_path, image_out_path)
call(cmd, shell=True)
result = True
except IOError as ex:
logging.getLogger("PYNETWORK").exception(ex)
return result
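# Illustrative usage (a sketch; the file paths are assumptions):
#
#   convert_html_to_image("/tmp/report.html", "/tmp/report.jpeg")
#   # returns True only when xvfb-run and wkhtmltoimage are installed and
#   # the shell call completes without raising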
def main():
"""Main entry point"""
arg_parser = argparse.ArgumentParser(
description=__description__,
usage='%(prog)s [OPTION]...')
arg_parser.add_argument("html", help="Html file path")
arg_parser.add_argument("image", help="Image Output file path")
args = arg_parser.parse_args()
if args:
convert_html_to_image(args.html, args.image)
if __name__ == "__main__":
main()
| mit |
wilvk/ansible | lib/ansible/modules/cloud/amazon/elb_application_lb.py | 27 | 41834 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: elb_application_lb
short_description: Manage an Application load balancer
description:
- Manage an AWS Application Elastic Load Balancer. See U(https://aws.amazon.com/blogs/aws/new-aws-application-load-balancer/) for details.
version_added: "2.4"
requirements: [ boto3 ]
author: "Rob White (@wimnat)"
options:
access_logs_enabled:
description:
- "Whether or not to enable access logs. When true, I(access_logs_s3_bucket) must be set."
required: false
choices: [ 'yes', 'no' ]
access_logs_s3_bucket:
description:
- The name of the S3 bucket for the access logs. This attribute is required if access logs in Amazon S3 are enabled. The bucket must exist in the same
region as the load balancer and have a bucket policy that grants Elastic Load Balancing permission to write to the bucket.
required: false
access_logs_s3_prefix:
description:
- The prefix for the location in the S3 bucket. If you don't specify a prefix, the access logs are stored in the root of the bucket.
required: false
deletion_protection:
description:
- Indicates whether deletion protection for the ELB is enabled.
required: false
default: no
choices: [ 'yes', 'no' ]
idle_timeout:
description:
- The number of seconds to wait before an idle connection is closed.
required: false
default: 60
listeners:
description:
- A list of dicts containing listeners to attach to the ELB. See examples for detail of the dict required. Note that listener keys
are CamelCased.
required: false
name:
description:
- The name of the load balancer. This name must be unique within your AWS account, can have a maximum of 32 characters, must contain only alphanumeric
characters or hyphens, and must not begin or end with a hyphen.
required: true
purge_listeners:
description:
- If yes, existing listeners will be purged from the ELB to match exactly what is defined by I(listeners) parameter. If the I(listeners) parameter is
not set then listeners will not be modified
default: yes
choices: [ 'yes', 'no' ]
purge_tags:
description:
- If yes, existing tags will be purged from the resource to match exactly what is defined by I(tags) parameter. If the I(tags) parameter is not set then
tags will not be modified.
required: false
default: yes
choices: [ 'yes', 'no' ]
subnets:
description:
- A list of the IDs of the subnets to attach to the load balancer. You can specify only one subnet per Availability Zone. You must specify subnets from
at least two Availability Zones. Required if state=present.
required: false
security_groups:
description:
- A list of the names or IDs of the security groups to assign to the load balancer. Required if state=present.
required: false
default: []
scheme:
description:
- Internet-facing or internal load balancer. An ELB scheme can not be modified after creation.
required: false
default: internet-facing
choices: [ 'internet-facing', 'internal' ]
state:
description:
- Create or destroy the load balancer.
required: true
choices: [ 'present', 'absent' ]
tags:
description:
- A dictionary of one or more tags to assign to the load balancer.
required: false
extends_documentation_fragment:
- aws
- ec2
notes:
- Listeners are matched based on port. If a listener's port is changed then a new listener will be created.
- Listener rules are matched based on priority. If a rule's priority is changed then a new rule will be created.
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an ELB and attach a listener
- elb_application_lb:
name: myelb
security_groups:
- sg-12345678
- my-sec-group
subnets:
- subnet-012345678
- subnet-abcdef000
listeners:
- Protocol: HTTP # Required. The protocol for connections from clients to the load balancer (HTTP or HTTPS) (case-sensitive).
Port: 80 # Required. The port on which the load balancer is listening.
# The security policy that defines which ciphers and protocols are supported. The default is the current predefined security policy.
SslPolicy: ELBSecurityPolicy-2015-05
        Certificates: # The ARN of the certificate (only one certificate ARN should be provided)
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
DefaultActions:
- Type: forward # Required. Only 'forward' is accepted at this time
TargetGroupName: # Required. The name of the target group
state: present
# Create an ELB and attach a listener with logging enabled
- elb_application_lb:
access_logs_enabled: yes
access_logs_s3_bucket: mybucket
access_logs_s3_prefix: "/logs"
name: myelb
security_groups:
- sg-12345678
- my-sec-group
subnets:
- subnet-012345678
- subnet-abcdef000
listeners:
- Protocol: HTTP # Required. The protocol for connections from clients to the load balancer (HTTP or HTTPS) (case-sensitive).
Port: 80 # Required. The port on which the load balancer is listening.
# The security policy that defines which ciphers and protocols are supported. The default is the current predefined security policy.
SslPolicy: ELBSecurityPolicy-2015-05
        Certificates: # The ARN of the certificate (only one certificate ARN should be provided)
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
DefaultActions:
- Type: forward # Required. Only 'forward' is accepted at this time
TargetGroupName: # Required. The name of the target group
state: present
# Create an ALB with listeners and rules
- elb_application_lb:
name: test-alb
subnets:
- subnet-12345678
- subnet-87654321
security_groups:
- sg-12345678
scheme: internal
listeners:
- Protocol: HTTPS
Port: 443
DefaultActions:
- Type: forward
TargetGroupName: test-target-group
Certificates:
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
SslPolicy: ELBSecurityPolicy-2015-05
Rules:
- Conditions:
- Field: path-pattern
Values:
- '/test'
Priority: '1'
Actions:
- TargetGroupName: test-target-group
Type: forward
state: present
# Remove an ELB
- elb_application_lb:
name: myelb
state: absent
'''
RETURN = '''
access_logs_s3_bucket:
description: The name of the S3 bucket for the access logs.
returned: when state is present
type: string
sample: mys3bucket
access_logs_s3_enabled:
description: Indicates whether access logs stored in Amazon S3 are enabled.
returned: when state is present
type: string
sample: true
access_logs_s3_prefix:
description: The prefix for the location in the S3 bucket.
returned: when state is present
type: string
sample: /my/logs
availability_zones:
description: The Availability Zones for the load balancer.
returned: when state is present
type: list
sample: "[{'subnet_id': 'subnet-aabbccddff', 'zone_name': 'ap-southeast-2a'}]"
canonical_hosted_zone_id:
description: The ID of the Amazon Route 53 hosted zone associated with the load balancer.
returned: when state is present
type: string
sample: ABCDEF12345678
created_time:
description: The date and time the load balancer was created.
returned: when state is present
type: string
sample: "2015-02-12T02:14:02+00:00"
deletion_protection_enabled:
description: Indicates whether deletion protection is enabled.
returned: when state is present
type: string
sample: true
dns_name:
description: The public DNS name of the load balancer.
returned: when state is present
type: string
sample: internal-my-elb-123456789.ap-southeast-2.elb.amazonaws.com
idle_timeout_timeout_seconds:
description: The idle timeout value, in seconds.
returned: when state is present
type: string
sample: 60
ip_address_type:
description: The type of IP addresses used by the subnets for the load balancer.
returned: when state is present
type: string
sample: ipv4
listeners:
description: Information about the listeners.
returned: when state is present
type: complex
contains:
listener_arn:
description: The Amazon Resource Name (ARN) of the listener.
returned: when state is present
type: string
sample: ""
load_balancer_arn:
description: The Amazon Resource Name (ARN) of the load balancer.
returned: when state is present
type: string
sample: ""
port:
description: The port on which the load balancer is listening.
returned: when state is present
type: int
sample: 80
protocol:
description: The protocol for connections from clients to the load balancer.
returned: when state is present
type: string
sample: HTTPS
certificates:
description: The SSL server certificate.
returned: when state is present
type: complex
contains:
certificate_arn:
description: The Amazon Resource Name (ARN) of the certificate.
returned: when state is present
type: string
sample: ""
ssl_policy:
description: The security policy that defines which ciphers and protocols are supported.
returned: when state is present
type: string
sample: ""
default_actions:
description: The default actions for the listener.
returned: when state is present
type: string
contains:
type:
description: The type of action.
returned: when state is present
type: string
sample: ""
target_group_arn:
description: The Amazon Resource Name (ARN) of the target group.
returned: when state is present
type: string
sample: ""
load_balancer_arn:
description: The Amazon Resource Name (ARN) of the load balancer.
returned: when state is present
type: string
sample: arn:aws:elasticloadbalancing:ap-southeast-2:0123456789:loadbalancer/app/my-elb/001122334455
load_balancer_name:
description: The name of the load balancer.
returned: when state is present
type: string
sample: my-elb
scheme:
description: Internet-facing or internal load balancer.
returned: when state is present
type: string
sample: internal
security_groups:
description: The IDs of the security groups for the load balancer.
returned: when state is present
type: list
sample: ['sg-0011223344']
state:
description: The state of the load balancer.
returned: when state is present
type: dict
sample: "{'code': 'active'}"
tags:
description: The tags attached to the load balancer.
returned: when state is present
type: dict
sample: "{
'Tag': 'Example'
}"
type:
description: The type of load balancer.
returned: when state is present
type: string
sample: application
vpc_id:
description: The ID of the VPC for the load balancer.
returned: when state is present
type: string
sample: vpc-0011223344
'''
import time
import collections
from copy import deepcopy
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types
from ansible.module_utils.ec2 import boto3_conn, get_aws_connection_info, camel_dict_to_snake_dict, ec2_argument_spec, get_ec2_security_group_ids_from_names, \
ansible_dict_to_boto3_tag_list, boto3_tag_list_to_ansible_dict, compare_aws_tags, HAS_BOTO3
try:
import boto3
from botocore.exceptions import ClientError, NoCredentialsError
except ImportError:
HAS_BOTO3 = False
def convert_tg_name_to_arn(connection, module, tg_name):
try:
response = connection.describe_target_groups(Names=[tg_name])
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
tg_arn = response['TargetGroups'][0]['TargetGroupArn']
return tg_arn
def wait_for_status(connection, module, elb_arn, status):
polling_increment_secs = 15
max_retries = module.params.get('wait_timeout') // polling_increment_secs
status_achieved = False
for x in range(0, max_retries):
try:
response = connection.describe_load_balancers(LoadBalancerArns=[elb_arn])
if response['LoadBalancers'][0]['State']['Code'] == status:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
result = response
return status_achieved, result
def _get_subnet_ids_from_subnet_list(subnet_list):
subnet_id_list = []
for subnet in subnet_list:
subnet_id_list.append(subnet['SubnetId'])
return subnet_id_list
def get_elb_listeners(connection, module, elb_arn):
try:
listener_paginator = connection.get_paginator('describe_listeners')
return (listener_paginator.paginate(LoadBalancerArn=elb_arn).build_full_result())['Listeners']
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
def get_elb_attributes(connection, module, elb_arn):
try:
elb_attributes = boto3_tag_list_to_ansible_dict(connection.describe_load_balancer_attributes(LoadBalancerArn=elb_arn)['Attributes'])
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Replace '.' with '_' in attribute key names to make it more Ansibley
return dict((k.replace('.', '_'), v) for k, v in elb_attributes.items())
def get_listener(connection, module, elb_arn, listener_port):
"""
Get a listener based on the port provided.
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
    :param listener_port: the port of the listener to look up
    :return: the matching listener dict, or None if not found
"""
try:
listener_paginator = connection.get_paginator('describe_listeners')
listeners = (listener_paginator.paginate(LoadBalancerArn=elb_arn).build_full_result())['Listeners']
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
    matched = None
    for listener in listeners:
        if listener['Port'] == listener_port:
            matched = listener
            break
    return matched
def get_elb(connection, module):
"""
Get an application load balancer based on name. If not found, return None
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
:return: Dict of load balancer attributes or None if not found
"""
try:
load_balancer_paginator = connection.get_paginator('describe_load_balancers')
return (load_balancer_paginator.paginate(Names=[module.params.get("name")]).build_full_result())['LoadBalancers'][0]
except ClientError as e:
if e.response['Error']['Code'] == 'LoadBalancerNotFound':
return None
else:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
def get_listener_rules(connection, module, listener_arn):
try:
return connection.describe_rules(ListenerArn=listener_arn)['Rules']
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
def ensure_listeners_default_action_has_arn(connection, module, listeners):
"""
    If a listener DefaultAction has been passed with a Target Group Name instead of ARN, look up the ARN and
    replace the name.
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
:param listeners: a list of listener dicts
:return: the same list of dicts ensuring that each listener DefaultActions dict has TargetGroupArn key. If a TargetGroupName key exists, it is removed.
"""
if not listeners:
listeners = []
for listener in listeners:
if 'TargetGroupName' in listener['DefaultActions'][0]:
listener['DefaultActions'][0]['TargetGroupArn'] = convert_tg_name_to_arn(connection, module, listener['DefaultActions'][0]['TargetGroupName'])
del listener['DefaultActions'][0]['TargetGroupName']
return listeners
def ensure_rules_action_has_arn(connection, module, rules):
"""
    If a rule Action has been passed with a Target Group Name instead of ARN, look up the ARN and
    replace the name.
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
:param rules: a list of rule dicts
:return: the same list of dicts ensuring that each rule Actions dict has TargetGroupArn key. If a TargetGroupName key exists, it is removed.
"""
for rule in rules:
if 'TargetGroupName' in rule['Actions'][0]:
rule['Actions'][0]['TargetGroupArn'] = convert_tg_name_to_arn(connection, module, rule['Actions'][0]['TargetGroupName'])
del rule['Actions'][0]['TargetGroupName']
return rules
def compare_listener(current_listener, new_listener):
"""
Compare two listeners.
:param current_listener:
:param new_listener:
:return:
"""
modified_listener = {}
# Port
if current_listener['Port'] != new_listener['Port']:
modified_listener['Port'] = new_listener['Port']
# Protocol
if current_listener['Protocol'] != new_listener['Protocol']:
modified_listener['Protocol'] = new_listener['Protocol']
# If Protocol is HTTPS, check additional attributes
if current_listener['Protocol'] == 'HTTPS' and new_listener['Protocol'] == 'HTTPS':
# Cert
if current_listener['SslPolicy'] != new_listener['SslPolicy']:
modified_listener['SslPolicy'] = new_listener['SslPolicy']
if current_listener['Certificates'][0]['CertificateArn'] != new_listener['Certificates'][0]['CertificateArn']:
modified_listener['Certificates'] = []
modified_listener['Certificates'].append({})
modified_listener['Certificates'][0]['CertificateArn'] = new_listener['Certificates'][0]['CertificateArn']
elif current_listener['Protocol'] != 'HTTPS' and new_listener['Protocol'] == 'HTTPS':
modified_listener['SslPolicy'] = new_listener['SslPolicy']
modified_listener['Certificates'] = []
modified_listener['Certificates'].append({})
modified_listener['Certificates'][0]['CertificateArn'] = new_listener['Certificates'][0]['CertificateArn']
# Default action
    # We won't worry about the Action Type because it is always 'forward'
if current_listener['DefaultActions'][0]['TargetGroupArn'] != new_listener['DefaultActions'][0]['TargetGroupArn']:
modified_listener['DefaultActions'] = []
modified_listener['DefaultActions'].append({})
modified_listener['DefaultActions'][0]['TargetGroupArn'] = new_listener['DefaultActions'][0]['TargetGroupArn']
modified_listener['DefaultActions'][0]['Type'] = 'forward'
if modified_listener:
return modified_listener
else:
return None
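# Illustrative sketch of compare_listener (abbreviated dicts, not taken from
# the module's tests): only the keys that differ come back, here a new
# target group ARN.
#
#   current = {'Port': 80, 'Protocol': 'HTTP',
#              'DefaultActions': [{'Type': 'forward', 'TargetGroupArn': 'arn:a'}]}
#   new = {'Port': 80, 'Protocol': 'HTTP',
#          'DefaultActions': [{'Type': 'forward', 'TargetGroupArn': 'arn:b'}]}
#   compare_listener(current, new)
#   # -> {'DefaultActions': [{'TargetGroupArn': 'arn:b', 'Type': 'forward'}]}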
def compare_condition(current_conditions, condition):
"""
:param current_conditions:
:param condition:
:return:
"""
condition_found = False
for current_condition in current_conditions:
if current_condition['Field'] == condition['Field'] and current_condition['Values'][0] == condition['Values'][0]:
condition_found = True
break
return condition_found
def compare_rule(current_rule, new_rule):
"""
Compare two rules.
:param current_rule:
:param new_rule:
:return:
"""
modified_rule = {}
# Priority
if current_rule['Priority'] != new_rule['Priority']:
modified_rule['Priority'] = new_rule['Priority']
# Actions
    # We won't worry about the Action Type because it is always 'forward'
if current_rule['Actions'][0]['TargetGroupArn'] != new_rule['Actions'][0]['TargetGroupArn']:
modified_rule['Actions'] = []
modified_rule['Actions'].append({})
modified_rule['Actions'][0]['TargetGroupArn'] = new_rule['Actions'][0]['TargetGroupArn']
modified_rule['Actions'][0]['Type'] = 'forward'
# Conditions
modified_conditions = []
for condition in new_rule['Conditions']:
if not compare_condition(current_rule['Conditions'], condition):
modified_conditions.append(condition)
if modified_conditions:
modified_rule['Conditions'] = modified_conditions
return modified_rule
def compare_listeners(connection, module, current_listeners, new_listeners, purge_listeners):
"""
Compare listeners and return listeners to add, listeners to modify and listeners to remove
Listeners are compared based on port
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
:param current_listeners:
:param new_listeners:
:param purge_listeners:
:return:
"""
listeners_to_modify = []
listeners_to_delete = []
# Check each current listener port to see if it's been passed to the module
for current_listener in current_listeners:
current_listener_passed_to_module = False
for new_listener in new_listeners[:]:
new_listener['Port'] = int(new_listener['Port'])
if current_listener['Port'] == new_listener['Port']:
current_listener_passed_to_module = True
# Remove what we match so that what is left can be marked as 'to be added'
new_listeners.remove(new_listener)
modified_listener = compare_listener(current_listener, new_listener)
if modified_listener:
modified_listener['Port'] = current_listener['Port']
modified_listener['ListenerArn'] = current_listener['ListenerArn']
listeners_to_modify.append(modified_listener)
break
# If the current listener was not matched against passed listeners and purge is True, mark for removal
if not current_listener_passed_to_module and purge_listeners:
listeners_to_delete.append(current_listener['ListenerArn'])
listeners_to_add = new_listeners
return listeners_to_add, listeners_to_modify, listeners_to_delete
def compare_rules(connection, module, current_listeners, listener):
"""
Compare rules and return rules to add, rules to modify and rules to remove
Rules are compared based on priority
:param connection: ELBv2 boto3 connection
:param module: Ansible module object
:param current_listeners: list of listeners currently associated with the ELB
:param listener: dict object of a listener passed by the user
:return:
"""
# Run through listeners looking for a match (by port) to get the ARN
for current_listener in current_listeners:
if current_listener['Port'] == listener['Port']:
listener['ListenerArn'] = current_listener['ListenerArn']
break
# If the listener exists (i.e. has an ARN) get rules for the listener
if 'ListenerArn' in listener:
current_rules = get_listener_rules(connection, module, listener['ListenerArn'])
else:
current_rules = []
rules_to_modify = []
rules_to_delete = []
for current_rule in current_rules:
current_rule_passed_to_module = False
for new_rule in listener['Rules'][:]:
if current_rule['Priority'] == new_rule['Priority']:
current_rule_passed_to_module = True
# Remove what we match so that what is left can be marked as 'to be added'
listener['Rules'].remove(new_rule)
modified_rule = compare_rule(current_rule, new_rule)
if modified_rule:
modified_rule['Priority'] = int(current_rule['Priority'])
modified_rule['RuleArn'] = current_rule['RuleArn']
modified_rule['Actions'] = new_rule['Actions']
modified_rule['Conditions'] = new_rule['Conditions']
rules_to_modify.append(modified_rule)
break
# If the current rule was not matched against passed rules, mark for removal
if not current_rule_passed_to_module and not current_rule['IsDefault']:
rules_to_delete.append(current_rule['RuleArn'])
rules_to_add = listener['Rules']
return rules_to_add, rules_to_modify, rules_to_delete
def create_or_update_elb_listeners(connection, module, elb):
"""Create or update ELB listeners. Return true if changed, else false"""
listener_changed = False
# Ensure listeners are using Target Group ARN not name
listeners = ensure_listeners_default_action_has_arn(connection, module, module.params.get("listeners"))
purge_listeners = module.params.get("purge_listeners")
    # Does the ELB have any existing listeners?
current_listeners = get_elb_listeners(connection, module, elb['LoadBalancerArn'])
listeners_to_add, listeners_to_modify, listeners_to_delete = compare_listeners(connection, module, current_listeners, deepcopy(listeners), purge_listeners)
# Add listeners
for listener_to_add in listeners_to_add:
try:
listener_to_add['LoadBalancerArn'] = elb['LoadBalancerArn']
# Rules is not a valid parameter for create_listener
if 'Rules' in listener_to_add:
listener_to_add.pop('Rules')
response = connection.create_listener(**listener_to_add)
# Add the new listener
current_listeners.append(response['Listeners'][0])
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Modify listeners
for listener_to_modify in listeners_to_modify:
try:
# Rules is not a valid parameter for modify_listener
if 'Rules' in listener_to_modify:
listener_to_modify.pop('Rules')
connection.modify_listener(**listener_to_modify)
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Delete listeners
for listener_to_delete in listeners_to_delete:
try:
connection.delete_listener(ListenerArn=listener_to_delete)
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# For each listener, check rules
for listener in deepcopy(listeners):
if 'Rules' in listener:
# Ensure rules are using Target Group ARN not name
listener['Rules'] = ensure_rules_action_has_arn(connection, module, listener['Rules'])
rules_to_add, rules_to_modify, rules_to_delete = compare_rules(connection, module, current_listeners, listener)
# Get listener based on port so we can use ARN
looked_up_listener = get_listener(connection, module, elb['LoadBalancerArn'], listener['Port'])
# Delete rules
for rule in rules_to_delete:
try:
connection.delete_rule(RuleArn=rule)
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Add rules
for rule in rules_to_add:
try:
rule['ListenerArn'] = looked_up_listener['ListenerArn']
rule['Priority'] = int(rule['Priority'])
connection.create_rule(**rule)
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Modify rules
for rule in rules_to_modify:
try:
del rule['Priority']
connection.modify_rule(**rule)
listener_changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
return listener_changed
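# Illustrative shape of a single entry in the listeners parameter consumed by
# create_or_update_elb_listeners above (all values are hypothetical):
#
#   {'Protocol': 'HTTP', 'Port': 80,
#    'DefaultActions': [{'Type': 'forward', 'TargetGroupArn': 'arn:aws:...'}],
#    'Rules': [{'Priority': '1',
#               'Conditions': [{'Field': 'path-pattern', 'Values': ['/img/*']}],
#               'Actions': [{'Type': 'forward', 'TargetGroupArn': 'arn:aws:...'}]}]}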
def create_or_update_elb(connection, connection_ec2, module):
"""Create ELB or modify main attributes. json_exit here"""
changed = False
new_load_balancer = False
params = dict()
params['Name'] = module.params.get("name")
params['Subnets'] = module.params.get("subnets")
try:
params['SecurityGroups'] = get_ec2_security_group_ids_from_names(module.params.get('security_groups'), connection_ec2, boto3=True)
except ValueError as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except NoCredentialsError as e:
module.fail_json(msg="AWS authentication problem. " + e.message, exception=traceback.format_exc())
params['Scheme'] = module.params.get("scheme")
if module.params.get("tags"):
params['Tags'] = ansible_dict_to_boto3_tag_list(module.params.get("tags"))
purge_tags = module.params.get("purge_tags")
access_logs_enabled = module.params.get("access_logs_enabled")
access_logs_s3_bucket = module.params.get("access_logs_s3_bucket")
access_logs_s3_prefix = module.params.get("access_logs_s3_prefix")
deletion_protection = module.params.get("deletion_protection")
idle_timeout = module.params.get("idle_timeout")
# Does the ELB currently exist?
elb = get_elb(connection, module)
if elb:
# ELB exists so check subnets, security groups and tags match what has been passed
# Subnets
if set(_get_subnet_ids_from_subnet_list(elb['AvailabilityZones'])) != set(params['Subnets']):
try:
connection.set_subnets(LoadBalancerArn=elb['LoadBalancerArn'], Subnets=params['Subnets'])
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
changed = True
# Security Groups
if set(elb['SecurityGroups']) != set(params['SecurityGroups']):
try:
connection.set_security_groups(LoadBalancerArn=elb['LoadBalancerArn'], SecurityGroups=params['SecurityGroups'])
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
changed = True
# Tags - only need to play with tags if tags parameter has been set to something
if module.params.get("tags"):
try:
elb_tags = connection.describe_tags(ResourceArns=[elb['LoadBalancerArn']])['TagDescriptions'][0]['Tags']
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Delete necessary tags
tags_need_modify, tags_to_delete = compare_aws_tags(boto3_tag_list_to_ansible_dict(elb_tags), boto3_tag_list_to_ansible_dict(params['Tags']),
purge_tags)
if tags_to_delete:
try:
connection.remove_tags(ResourceArns=[elb['LoadBalancerArn']], TagKeys=tags_to_delete)
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
changed = True
# Add/update tags
if tags_need_modify:
try:
connection.add_tags(ResourceArns=[elb['LoadBalancerArn']], Tags=params['Tags'])
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
changed = True
else:
try:
elb = connection.create_load_balancer(**params)['LoadBalancers'][0]
changed = True
new_load_balancer = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
if module.params.get("wait"):
status_achieved, new_elb = wait_for_status(connection, module, elb['LoadBalancerArn'], 'active')
# Now set ELB attributes. Use try statement here so we can remove the ELB if this stage fails
update_attributes = []
# Get current attributes
current_elb_attributes = get_elb_attributes(connection, module, elb['LoadBalancerArn'])
if access_logs_enabled and current_elb_attributes['access_logs_s3_enabled'] != "true":
update_attributes.append({'Key': 'access_logs.s3.enabled', 'Value': "true"})
if not access_logs_enabled and current_elb_attributes['access_logs_s3_enabled'] != "false":
update_attributes.append({'Key': 'access_logs.s3.enabled', 'Value': 'false'})
if access_logs_s3_bucket is not None and access_logs_s3_bucket != current_elb_attributes['access_logs_s3_bucket']:
update_attributes.append({'Key': 'access_logs.s3.bucket', 'Value': access_logs_s3_bucket})
if access_logs_s3_prefix is not None and access_logs_s3_prefix != current_elb_attributes['access_logs_s3_prefix']:
update_attributes.append({'Key': 'access_logs.s3.prefix', 'Value': access_logs_s3_prefix})
if deletion_protection and current_elb_attributes['deletion_protection_enabled'] != "true":
update_attributes.append({'Key': 'deletion_protection.enabled', 'Value': "true"})
if not deletion_protection and current_elb_attributes['deletion_protection_enabled'] != "false":
update_attributes.append({'Key': 'deletion_protection.enabled', 'Value': "false"})
if idle_timeout is not None and str(idle_timeout) != current_elb_attributes['idle_timeout_timeout_seconds']:
update_attributes.append({'Key': 'idle_timeout.timeout_seconds', 'Value': str(idle_timeout)})
if update_attributes:
try:
connection.modify_load_balancer_attributes(LoadBalancerArn=elb['LoadBalancerArn'], Attributes=update_attributes)
changed = True
except ClientError as e:
# Something went wrong setting attributes. If this ELB was created during this task, delete it to leave a consistent state
if new_load_balancer:
connection.delete_load_balancer(LoadBalancerArn=elb['LoadBalancerArn'])
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Now, if required, set ELB listeners. Use try statement here so we can remove the ELB if this stage fails
try:
listener_changed = create_or_update_elb_listeners(connection, module, elb)
if listener_changed:
changed = True
except ClientError as e:
# Something went wrong setting listeners. If this ELB was created during this task, delete it to leave a consistent state
if new_load_balancer:
connection.delete_load_balancer(LoadBalancerArn=elb['LoadBalancerArn'])
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
# Get the ELB again
elb = get_elb(connection, module)
# Get the ELB listeners again
elb['listeners'] = get_elb_listeners(connection, module, elb['LoadBalancerArn'])
# For each listener, get listener rules
for listener in elb['listeners']:
listener['rules'] = get_listener_rules(connection, module, listener['ListenerArn'])
# Get the ELB attributes again
elb.update(get_elb_attributes(connection, module, elb['LoadBalancerArn']))
# Convert to snake_case
snaked_elb = camel_dict_to_snake_dict(elb)
# Get the tags of the ELB
elb_tags = connection.describe_tags(ResourceArns=[elb['LoadBalancerArn']])['TagDescriptions'][0]['Tags']
snaked_elb['tags'] = boto3_tag_list_to_ansible_dict(elb_tags)
module.exit_json(changed=changed, **snaked_elb)
def delete_elb(connection, module):
changed = False
elb = get_elb(connection, module)
if elb:
try:
connection.delete_load_balancer(LoadBalancerArn=elb['LoadBalancerArn'])
changed = True
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
except NoCredentialsError as e:
module.fail_json(msg="AWS authentication problem. " + e.message, exception=traceback.format_exc())
module.exit_json(changed=changed)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
access_logs_enabled=dict(type='bool'),
access_logs_s3_bucket=dict(type='str'),
access_logs_s3_prefix=dict(type='str'),
deletion_protection=dict(default=False, type='bool'),
idle_timeout=dict(type='int'),
listeners=dict(type='list'),
name=dict(required=True, type='str'),
purge_listeners=dict(default=True, type='bool'),
purge_tags=dict(default=True, type='bool'),
subnets=dict(type='list'),
security_groups=dict(type='list'),
scheme=dict(default='internet-facing', choices=['internet-facing', 'internal']),
state=dict(choices=['present', 'absent'], type='str'),
tags=dict(default={}, type='dict'),
wait_timeout=dict(type='int'),
wait=dict(type='bool')
)
)
module = AnsibleModule(argument_spec=argument_spec,
required_if=[
('state', 'present', ['subnets', 'security_groups'])
],
required_together=(
['access_logs_enabled', 'access_logs_s3_bucket', 'access_logs_s3_prefix']
)
)
    # Quick sanity check of the listeners parameter
listeners = module.params.get("listeners")
if listeners is not None:
for listener in listeners:
for key in listener.keys():
if key not in ['Protocol', 'Port', 'SslPolicy', 'Certificates', 'DefaultActions', 'Rules']:
module.fail_json(msg="listeners parameter contains invalid dict keys. Should be one of 'Protocol', "
"'Port', 'SslPolicy', 'Certificates', 'DefaultActions', 'Rules'.")
# Make sure Port is always an integer
elif key == 'Port':
listener[key] = int(listener[key])
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if region:
connection = boto3_conn(module, conn_type='client', resource='elbv2', region=region, endpoint=ec2_url, **aws_connect_params)
connection_ec2 = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_params)
else:
module.fail_json(msg="region must be specified")
state = module.params.get("state")
if state == 'present':
create_or_update_elb(connection, connection_ec2, module)
else:
delete_elb(connection, module)
if __name__ == '__main__':
main()
| gpl-3.0 |
ncdesouza/bookworm | env/lib/python2.7/site-packages/wtforms/widgets/html5.py | 153 | 2214 | """
Widgets for various HTML5 input types.
"""
from .core import Input
__all__ = (
'ColorInput', 'DateInput', 'DateTimeInput', 'DateTimeLocalInput',
'EmailInput', 'MonthInput', 'NumberInput', 'RangeInput', 'SearchInput',
'TelInput', 'TimeInput', 'URLInput', 'WeekInput',
)
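# Usage sketch (a hedged example; assumes a wtforms form defined in
# application code, not part of this module):
#
#   from wtforms import Form, DecimalField
#   from wtforms.widgets.html5 import NumberInput
#
#   class PriceForm(Form):
#       price = DecimalField('Price', widget=NumberInput(step='0.01'))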
class SearchInput(Input):
"""
Renders an input with type "search".
"""
input_type = 'search'
class TelInput(Input):
"""
Renders an input with type "tel".
"""
input_type = 'tel'
class URLInput(Input):
"""
Renders an input with type "url".
"""
input_type = 'url'
class EmailInput(Input):
"""
Renders an input with type "email".
"""
input_type = 'email'
class DateTimeInput(Input):
"""
Renders an input with type "datetime".
"""
input_type = 'datetime'
class DateInput(Input):
"""
Renders an input with type "date".
"""
input_type = 'date'
class MonthInput(Input):
"""
Renders an input with type "month".
"""
input_type = 'month'
class WeekInput(Input):
"""
Renders an input with type "week".
"""
input_type = 'week'
class TimeInput(Input):
"""
Renders an input with type "time".
"""
input_type = 'time'
class DateTimeLocalInput(Input):
"""
Renders an input with type "datetime-local".
"""
input_type = 'datetime-local'
class NumberInput(Input):
"""
Renders an input with type "number".
"""
input_type = 'number'
def __init__(self, step=None):
self.step = step
def __call__(self, field, **kwargs):
if self.step is not None:
kwargs.setdefault('step', self.step)
return super(NumberInput, self).__call__(field, **kwargs)
class RangeInput(Input):
"""
Renders an input with type "range".
"""
input_type = 'range'
def __init__(self, step=None):
self.step = step
def __call__(self, field, **kwargs):
if self.step is not None:
kwargs.setdefault('step', self.step)
return super(RangeInput, self).__call__(field, **kwargs)
class ColorInput(Input):
"""
Renders an input with type "color".
"""
input_type = 'color'
| gpl-3.0 |
sem-geologist/Qstitch | ui/Ui_finalize.py | 1 | 5411 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'finalize.ui'
#
# Created: Fri Apr 10 01:36:55 2015
# by: PyQt4 UI code generator 4.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_FinalImage(object):
def setupUi(self, FinalImage):
FinalImage.setObjectName(_fromUtf8("FinalImage"))
FinalImage.resize(623, 154)
self.horizontalLayout = QtGui.QHBoxLayout(FinalImage)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.statusText = QtGui.QLabel(FinalImage)
self.statusText.setObjectName(_fromUtf8("statusText"))
self.gridLayout.addWidget(self.statusText, 4, 1, 1, 1)
self.label_4 = QtGui.QLabel(FinalImage)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 4, 0, 1, 1)
self.progressBar_3 = QtGui.QProgressBar(FinalImage)
self.progressBar_3.setProperty("value", 0)
self.progressBar_3.setObjectName(_fromUtf8("progressBar_3"))
self.gridLayout.addWidget(self.progressBar_3, 2, 1, 1, 1)
self.label = QtGui.QLabel(FinalImage)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 2, 0, 1, 1)
self.label_2 = QtGui.QLabel(FinalImage)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 0, 0, 1, 1)
self.progressBar_2 = QtGui.QProgressBar(FinalImage)
self.progressBar_2.setProperty("value", 0)
self.progressBar_2.setObjectName(_fromUtf8("progressBar_2"))
self.gridLayout.addWidget(self.progressBar_2, 1, 1, 1, 1)
self.progressBar_1 = QtGui.QProgressBar(FinalImage)
self.progressBar_1.setProperty("value", 0)
self.progressBar_1.setObjectName(_fromUtf8("progressBar_1"))
self.gridLayout.addWidget(self.progressBar_1, 0, 1, 1, 1)
self.label_3 = QtGui.QLabel(FinalImage)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1)
self.line = QtGui.QFrame(FinalImage)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.gridLayout.addWidget(self.line, 3, 1, 1, 1)
self.horizontalLayout.addLayout(self.gridLayout)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.browseButton = QtGui.QPushButton(FinalImage)
self.browseButton.setObjectName(_fromUtf8("browseButton"))
self.verticalLayout.addWidget(self.browseButton)
self.imageFormat = QtGui.QComboBox(FinalImage)
self.imageFormat.setObjectName(_fromUtf8("imageFormat"))
self.imageFormat.addItem(_fromUtf8(""))
self.imageFormat.addItem(_fromUtf8(""))
self.verticalLayout.addWidget(self.imageFormat)
self.startButton = QtGui.QPushButton(FinalImage)
self.startButton.setObjectName(_fromUtf8("startButton"))
self.verticalLayout.addWidget(self.startButton)
self.abortButton = QtGui.QPushButton(FinalImage)
self.abortButton.setEnabled(False)
self.abortButton.setText(_fromUtf8("Abort"))
self.abortButton.setObjectName(_fromUtf8("abortButton"))
self.verticalLayout.addWidget(self.abortButton)
self.closeButton = QtGui.QPushButton(FinalImage)
self.closeButton.setEnabled(True)
self.closeButton.setObjectName(_fromUtf8("closeButton"))
self.verticalLayout.addWidget(self.closeButton)
self.horizontalLayout.addLayout(self.verticalLayout)
self.retranslateUi(FinalImage)
QtCore.QObject.connect(self.closeButton, QtCore.SIGNAL(_fromUtf8("pressed()")), FinalImage.reject)
QtCore.QMetaObject.connectSlotsByName(FinalImage)
def retranslateUi(self, FinalImage):
FinalImage.setWindowTitle(_translate("FinalImage", "Export to images", None))
self.statusText.setText(_translate("FinalImage", "...", None))
self.label_4.setText(_translate("FinalImage", "status:", None))
self.label.setText(_translate("FinalImage", "Total", None))
self.label_2.setText(_translate("FinalImage", "progress of \n"
" the one plane", None))
self.label_3.setText(_translate("FinalImage", "progress of\n"
" the sample", None))
self.browseButton.setText(_translate("FinalImage", "Save to folder...", None))
self.imageFormat.setItemText(0, _translate("FinalImage", "*.tif", None))
self.imageFormat.setItemText(1, _translate("FinalImage", "*.png", None))
self.startButton.setText(_translate("FinalImage", "Start", None))
self.closeButton.setText(_translate("FinalImage", "Close", None))
| gpl-2.0 |
M3nin0/supreme-broccoli | Web/Flask/site_/lib/python3.5/site-packages/werkzeug/contrib/jsrouting.py | 513 | 8564 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.jsrouting
~~~~~~~~~~~~~~~~~~~~~~~~~~
    Addon module that creates a JavaScript URL-building function from the
    rules defined in a routing map.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from simplejson import dumps
except ImportError:
try:
from json import dumps
except ImportError:
def dumps(*args):
raise RuntimeError('simplejson required for jsrouting')
from inspect import getmro
from werkzeug.routing import NumberConverter
from werkzeug._compat import iteritems
def render_template(name_parts, rules, converters):
result = u''
if name_parts:
for idx in range(0, len(name_parts) - 1):
name = u'.'.join(name_parts[:idx + 1])
result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
result += '%s = ' % '.'.join(name_parts)
result += """(function (server_name, script_name, subdomain, url_scheme) {
var converters = [%(converters)s];
var rules = %(rules)s;
function in_array(array, value) {
if (array.indexOf != undefined) {
return array.indexOf(value) != -1;
}
for (var i = 0; i < array.length; i++) {
if (array[i] == value) {
return true;
}
}
return false;
}
function array_diff(array1, array2) {
array1 = array1.slice();
for (var i = array1.length-1; i >= 0; i--) {
if (in_array(array2, array1[i])) {
array1.splice(i, 1);
}
}
return array1;
}
function split_obj(obj) {
var names = [];
var values = [];
for (var name in obj) {
if (typeof(obj[name]) != 'function') {
names.push(name);
values.push(obj[name]);
}
}
return {names: names, values: values, original: obj};
}
function suitable(rule, args) {
var default_args = split_obj(rule.defaults || {});
var diff_arg_names = array_diff(rule.arguments, default_args.names);
for (var i = 0; i < diff_arg_names.length; i++) {
if (!in_array(args.names, diff_arg_names[i])) {
return false;
}
}
if (array_diff(rule.arguments, args.names).length == 0) {
if (rule.defaults == null) {
return true;
}
for (var i = 0; i < default_args.names.length; i++) {
var key = default_args.names[i];
var value = default_args.values[i];
if (value != args.original[key]) {
return false;
}
}
}
return true;
}
function build(rule, args) {
var tmp = [];
var processed = rule.arguments.slice();
for (var i = 0; i < rule.trace.length; i++) {
var part = rule.trace[i];
if (part.is_dynamic) {
var converter = converters[rule.converters[part.data]];
var data = converter(args.original[part.data]);
if (data == null) {
return null;
}
tmp.push(data);
processed.push(part.name);
} else {
tmp.push(part.data);
}
}
tmp = tmp.join('');
var pipe = tmp.indexOf('|');
var subdomain = tmp.substring(0, pipe);
var url = tmp.substring(pipe+1);
var unprocessed = array_diff(args.names, processed);
var first_query_var = true;
for (var i = 0; i < unprocessed.length; i++) {
if (first_query_var) {
url += '?';
} else {
url += '&';
}
first_query_var = false;
url += encodeURIComponent(unprocessed[i]);
url += '=';
url += encodeURIComponent(args.original[unprocessed[i]]);
}
return {subdomain: subdomain, path: url};
}
function lstrip(s, c) {
while (s && s.substring(0, 1) == c) {
s = s.substring(1);
}
return s;
}
function rstrip(s, c) {
while (s && s.substring(s.length-1, s.length) == c) {
s = s.substring(0, s.length-1);
}
return s;
}
return function(endpoint, args, force_external) {
args = split_obj(args);
var rv = null;
for (var i = 0; i < rules.length; i++) {
var rule = rules[i];
if (rule.endpoint != endpoint) continue;
if (suitable(rule, args)) {
rv = build(rule, args);
if (rv != null) {
break;
}
}
}
if (rv == null) {
return null;
}
if (!force_external && rv.subdomain == subdomain) {
return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/');
} else {
return url_scheme + '://'
+ (rv.subdomain ? rv.subdomain + '.' : '')
+ server_name + rstrip(script_name, '/')
+ '/' + lstrip(rv.path, '/');
}
};
})""" % {'converters': u', '.join(converters),
'rules': rules}
return result
def generate_map(map, name='url_map'):
"""
Generates a JavaScript function containing the rules defined in
this map, to be used with a MapAdapter's generate_javascript
method. If you don't pass a name the returned JavaScript code is
an expression that returns a function. Otherwise it's a standalone
    script that assigns the function to that name. Dotted names are
    resolved (so you can use a name like 'obj.url_for')
In order to use JavaScript generation, simplejson must be installed.
Note that using this feature will expose the rules
defined in your map to users. If your rules contain sensitive
information, don't use JavaScript generation!
"""
from warnings import warn
warn(DeprecationWarning('This module is deprecated'))
map.update()
rules = []
converters = []
for rule in map.iter_rules():
trace = [{
'is_dynamic': is_dynamic,
'data': data
} for is_dynamic, data in rule._trace]
rule_converters = {}
for key, converter in iteritems(rule._converters):
js_func = js_to_url_function(converter)
try:
index = converters.index(js_func)
except ValueError:
converters.append(js_func)
index = len(converters) - 1
rule_converters[key] = index
rules.append({
u'endpoint': rule.endpoint,
u'arguments': list(rule.arguments),
u'converters': rule_converters,
u'trace': trace,
u'defaults': rule.defaults
})
return render_template(name_parts=name and name.split('.') or [],
rules=dumps(rules),
converters=converters)
def generate_adapter(adapter, name='url_for', map_name='url_map'):
"""Generates the url building function for a map."""
values = {
u'server_name': dumps(adapter.server_name),
u'script_name': dumps(adapter.script_name),
u'subdomain': dumps(adapter.subdomain),
u'url_scheme': dumps(adapter.url_scheme),
u'name': name,
u'map_name': map_name
}
return u'''\
var %(name)s = %(map_name)s(
%(server_name)s,
%(script_name)s,
%(subdomain)s,
%(url_scheme)s
);''' % values
def js_to_url_function(converter):
"""Get the JavaScript converter function from a rule."""
if hasattr(converter, 'js_to_url_function'):
data = converter.js_to_url_function()
else:
for cls in getmro(type(converter)):
if cls in js_to_url_functions:
data = js_to_url_functions[cls](converter)
break
else:
return 'encodeURIComponent'
return '(function(value) { %s })' % data
def NumberConverter_js_to_url(conv):
if conv.fixed_digits:
return u'''\
var result = value.toString();
while (result.length < %s)
result = '0' + result;
return result;''' % conv.fixed_digits
return u'return value.toString();'
js_to_url_functions = {
NumberConverter: NumberConverter_js_to_url
}
| apache-2.0 |
kidaa/aurora | build-support/jenkins/review_feedback.py | 5 | 8383 | #!/usr/bin/env python2.7
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Runs a CI script when new diffs are posted on Review Board.
# TODO(wfarner): Also add support for pinging stale reviews.
from __future__ import print_function
import argparse
import base64
import json
import subprocess
import sys
import urllib
import urllib2
class ReviewBoard(object):
def __init__(self, host, user, password):
self._host = host
self.user = user
self._password = password
def api_url(self, api_path):
return 'https://%s/api/%s/' % (self._host, api_path)
def get_resource_data(self, href, args=None, accept='application/json', data=None):
href = '%s?%s' % (href, urllib.urlencode(args)) if args else href
print('Request: %s' % href)
request = urllib2.Request(href)
base64string = base64.encodestring('%s:%s' % (self.user, self._password)).replace('\n', '')
request.add_header('Authorization', 'Basic %s' % base64string)
request.add_header('Accept', accept)
result = urllib2.urlopen(request, data=data)
if result.getcode() / 100 != 2:
print('Non-ok response: %s\n%s' % (result.getcode(), result))
sys.exit(1)
return result.read()
def get_resource(self, href, args=None, data=None):
return json.loads(self.get_resource_data(href, args=args, data=data))
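# Example call against the Review Board API (host and credentials are
# hypothetical):
#
#   rb = ReviewBoard('reviews.example.org', 'bot', 'secret')
#   pending = rb.get_resource(rb.api_url('review-requests'),
#                             args={'status': 'pending'})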
# Thrown when the patch from a review diff could not be applied.
class PatchApplyError(Exception):
pass
def _apply_patch(patch_data, clean_excludes):
subprocess.check_call(['git', 'clean', '-fdx'] + ['--exclude=%s' % e for e in clean_excludes])
subprocess.check_call(['git', 'reset', '--hard', 'origin/master'])
patch_file = 'diff.patch'
with open(patch_file, 'w') as f:
f.write(patch_data)
try:
subprocess.check_call(['git', 'apply', patch_file])
except subprocess.CalledProcessError:
raise PatchApplyError()
def _get_latest_diff_time(server, request):
diffs = server.get_resource(request['links']['diffs']['href'])['diffs']
return diffs[-1]['timestamp']
REPLY_REQUEST = '@ReviewBot retry'
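# A review whose body_top contains the marker above (matched case-insensitively
# in _get_latest_user_request below) requests a fresh build, e.g. a
# hypothetical reply of "Build looks stale -- @ReviewBot retry".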
def _get_latest_user_request(reviews):
reply_requests = [r for r in reviews if REPLY_REQUEST.lower() in r['body_top'].lower()]
if reply_requests:
return reply_requests[-1]['timestamp']
def _needs_reply(server, request):
print('Inspecting review %d: %s' % (request['id'], request['summary']))
reviews_response = server.get_resource(request['links']['reviews']['href'])
reviews = reviews_response['reviews']
# The default response limit is 25. When the responses are limited, a 'next' link will be
# included. When that happens, continue to walk until there are no reviews left.
while 'next' in reviews_response['links']:
print('Fetching next page of reviews.')
reviews_response = server.get_resource(reviews_response['links']['next']['href'])
reviews.extend(reviews_response['reviews'])
feedback_reviews = [r for r in reviews if r['links']['user']['title'] == server.user]
if feedback_reviews:
# Determine whether another round of feedback is necessary.
latest_feedback_time = feedback_reviews[-1]['timestamp']
latest_request = _get_latest_user_request(reviews)
latest_diff = _get_latest_diff_time(server, request)
print('Latest feedback was given at %s' % latest_feedback_time)
print('Latest build request from a user at %s' % latest_request)
print('Latest diff was posted at %s' % latest_diff)
return ((latest_request and (latest_request > latest_feedback_time))
or (latest_diff and (latest_diff > latest_feedback_time)))
return True
def _missing_tests(server, diff):
# Get files that were modified by the change, flag if test coverage appears lacking.
diff_files = server.get_resource(diff['links']['files']['href'])['files']
paths = [f['source_file'] for f in diff_files]
return (filter(lambda f: f.startswith('src/main/'), paths) and not
filter(lambda f: f.startswith('src/test/'), paths))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--server', dest='server', help='Review Board server.', required=True)
parser.add_argument(
'--reviewboard-credentials-file',
type=argparse.FileType(),
help='Review Board credentials file, formatted as <user>\\n<password>',
required=True)
parser.add_argument(
'--repository',
help='Inspect reviews posted for this repository.',
required=True)
parser.add_argument(
'--command',
help='Build verification command.',
required=True)
parser.add_argument(
'--tail-lines',
type=int,
default=20,
help='Number of lines of command output to include in red build reviews.',
required=True)
parser.add_argument(
'--git-clean-exclude',
help='Patterns to pass to git-clean --exclude.',
nargs='*')
args = parser.parse_args()
credentials = args.reviewboard_credentials_file.readlines()
server = ReviewBoard(
host=args.server,
user=credentials[0].strip(),
password=credentials[1].strip())
# Find the numeric ID for the repository, required by other API calls.
repositories = server.get_resource(
server.api_url('repositories'),
args={'name': args.repository})['repositories']
if not repositories:
print('Failed to find repository %s' % args.repository)
sys.exit(1)
repository_id = repositories[0]['id']
# Fetch all in-flight reviews. (Note: this does not do pagination, required when > 200 results.)
requests = server.get_resource(server.api_url('review-requests'), args={
'repository': repository_id,
'status': 'pending'
})['review_requests']
print('Found %d review requests to inspect' % len(requests))
for request in requests:
if not _needs_reply(server, request):
continue
diffs = server.get_resource(request['links']['diffs']['href'])['diffs']
if not diffs:
continue
latest_diff = diffs[-1]
print('Applying diff %d' % latest_diff['id'])
patch_data = server.get_resource_data(
latest_diff['links']['self']['href'],
accept='text/x-patch')
sha = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
ship = False
try:
_apply_patch(patch_data, args.git_clean_exclude)
print('Running build command.')
build_output = 'build_output'
command = args.command
# Pipe to a file in case output is large, also tee the output to simplify
# debugging. Since we pipe the output, we must set pipefail to ensure
# a failing build command fails the bash pipeline.
result = subprocess.call([
'bash',
'-c',
'set -o pipefail; %s 2>&1 | tee %s' % (command, build_output)])
if result == 0:
review_text = 'Master (%s) is green with this patch.\n %s' % (sha, command)
if _missing_tests(server, latest_diff):
review_text = '%s\n\nHowever, it appears that it might lack test coverage.' % review_text
else:
ship = True
else:
build_tail = subprocess.check_output(['tail', '-n', str(args.tail_lines), build_output])
review_text = (
'Master (%s) is red with this patch.\n %s\n\n%s' % (sha, command, build_tail))
except PatchApplyError:
review_text = (
'This patch does not apply cleanly on master (%s), do you need to rebase?' % sha)
review_text = ('%s\n\nI will refresh this build result if you post a review containing "%s"'
% (review_text, REPLY_REQUEST))
print('Replying to review %d:\n%s' % (request['id'], review_text))
print(server.get_resource(
request['links']['reviews']['href'],
data=urllib.urlencode({
'body_top': review_text,
'public': 'true',
'ship_it': 'true' if ship else 'false'
})))
if __name__=="__main__":
main()
| apache-2.0 |
iMilind/grpc | src/python/src/grpc/_adapter/_links_test.py | 7 | 10736 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test of the GRPC-backed ForeLink and RearLink."""
import threading
import unittest
from grpc._adapter import _proto_scenarios
from grpc._adapter import _test_links
from grpc._adapter import fore
from grpc._adapter import rear
from grpc.framework.base import interfaces
from grpc.framework.foundation import logging_pool
_IDENTITY = lambda x: x
_TIMEOUT = 2
class RoundTripTest(unittest.TestCase):
def setUp(self):
self.fore_link_pool = logging_pool.pool(80)
self.rear_link_pool = logging_pool.pool(80)
def tearDown(self):
self.rear_link_pool.shutdown(wait=True)
self.fore_link_pool.shutdown(wait=True)
def testZeroMessageRoundTrip(self):
test_operation_id = object()
test_method = 'test method'
test_fore_link = _test_links.ForeLink(None, None)
def rear_action(front_to_back_ticket, fore_link):
if front_to_back_ticket.kind in (
interfaces.FrontToBackTicket.Kind.COMPLETION,
interfaces.FrontToBackTicket.Kind.ENTIRE):
back_to_front_ticket = interfaces.BackToFrontTicket(
front_to_back_ticket.operation_id, 0,
interfaces.BackToFrontTicket.Kind.COMPLETION, None)
fore_link.accept_back_to_front_ticket(back_to_front_ticket)
test_rear_link = _test_links.RearLink(rear_action, None)
fore_link = fore.ForeLink(
self.fore_link_pool, {test_method: None}, {test_method: None}, None, ())
fore_link.join_rear_link(test_rear_link)
test_rear_link.join_fore_link(fore_link)
fore_link.start()
port = fore_link.port()
rear_link = rear.RearLink(
'localhost', port, self.rear_link_pool, {test_method: None},
{test_method: None}, False, None, None, None)
rear_link.join_fore_link(test_fore_link)
test_fore_link.join_rear_link(rear_link)
rear_link.start()
front_to_back_ticket = interfaces.FrontToBackTicket(
test_operation_id, 0, interfaces.FrontToBackTicket.Kind.ENTIRE,
test_method, interfaces.ServicedSubscription.Kind.FULL, None, None,
_TIMEOUT)
rear_link.accept_front_to_back_ticket(front_to_back_ticket)
with test_fore_link.condition:
while (not test_fore_link.tickets or
test_fore_link.tickets[-1].kind is
interfaces.BackToFrontTicket.Kind.CONTINUATION):
test_fore_link.condition.wait()
rear_link.stop()
fore_link.stop()
with test_fore_link.condition:
self.assertIs(
test_fore_link.tickets[-1].kind,
interfaces.BackToFrontTicket.Kind.COMPLETION)
def testEntireRoundTrip(self):
test_operation_id = object()
test_method = 'test method'
test_front_to_back_datum = b'\x07'
test_back_to_front_datum = b'\x08'
test_fore_link = _test_links.ForeLink(None, None)
rear_sequence_number = [0]
def rear_action(front_to_back_ticket, fore_link):
if front_to_back_ticket.payload is None:
payload = None
else:
payload = test_back_to_front_datum
terminal = front_to_back_ticket.kind in (
interfaces.FrontToBackTicket.Kind.COMPLETION,
interfaces.FrontToBackTicket.Kind.ENTIRE)
if payload is not None or terminal:
if terminal:
kind = interfaces.BackToFrontTicket.Kind.COMPLETION
else:
kind = interfaces.BackToFrontTicket.Kind.CONTINUATION
back_to_front_ticket = interfaces.BackToFrontTicket(
front_to_back_ticket.operation_id, rear_sequence_number[0], kind,
payload)
rear_sequence_number[0] += 1
fore_link.accept_back_to_front_ticket(back_to_front_ticket)
test_rear_link = _test_links.RearLink(rear_action, None)
fore_link = fore.ForeLink(
self.fore_link_pool, {test_method: _IDENTITY},
{test_method: _IDENTITY}, None, ())
fore_link.join_rear_link(test_rear_link)
test_rear_link.join_fore_link(fore_link)
fore_link.start()
port = fore_link.port()
rear_link = rear.RearLink(
'localhost', port, self.rear_link_pool, {test_method: _IDENTITY},
{test_method: _IDENTITY}, False, None, None, None)
rear_link.join_fore_link(test_fore_link)
test_fore_link.join_rear_link(rear_link)
rear_link.start()
front_to_back_ticket = interfaces.FrontToBackTicket(
test_operation_id, 0, interfaces.FrontToBackTicket.Kind.ENTIRE,
test_method, interfaces.ServicedSubscription.Kind.FULL, None,
test_front_to_back_datum, _TIMEOUT)
rear_link.accept_front_to_back_ticket(front_to_back_ticket)
with test_fore_link.condition:
while (not test_fore_link.tickets or
test_fore_link.tickets[-1].kind is not
interfaces.BackToFrontTicket.Kind.COMPLETION):
test_fore_link.condition.wait()
rear_link.stop()
fore_link.stop()
with test_rear_link.condition:
front_to_back_payloads = tuple(
ticket.payload for ticket in test_rear_link.tickets
if ticket.payload is not None)
with test_fore_link.condition:
back_to_front_payloads = tuple(
ticket.payload for ticket in test_fore_link.tickets
if ticket.payload is not None)
self.assertTupleEqual((test_front_to_back_datum,), front_to_back_payloads)
self.assertTupleEqual((test_back_to_front_datum,), back_to_front_payloads)
def _perform_scenario_test(self, scenario):
test_operation_id = object()
test_method = scenario.method()
test_fore_link = _test_links.ForeLink(None, None)
rear_lock = threading.Lock()
rear_sequence_number = [0]
def rear_action(front_to_back_ticket, fore_link):
with rear_lock:
if front_to_back_ticket.payload is not None:
response = scenario.response_for_request(front_to_back_ticket.payload)
else:
response = None
terminal = front_to_back_ticket.kind in (
interfaces.FrontToBackTicket.Kind.COMPLETION,
interfaces.FrontToBackTicket.Kind.ENTIRE)
if response is not None or terminal:
if terminal:
kind = interfaces.BackToFrontTicket.Kind.COMPLETION
else:
kind = interfaces.BackToFrontTicket.Kind.CONTINUATION
back_to_front_ticket = interfaces.BackToFrontTicket(
front_to_back_ticket.operation_id, rear_sequence_number[0], kind,
response)
rear_sequence_number[0] += 1
fore_link.accept_back_to_front_ticket(back_to_front_ticket)
test_rear_link = _test_links.RearLink(rear_action, None)
fore_link = fore.ForeLink(
self.fore_link_pool, {test_method: scenario.deserialize_request},
{test_method: scenario.serialize_response}, None, ())
fore_link.join_rear_link(test_rear_link)
test_rear_link.join_fore_link(fore_link)
fore_link.start()
port = fore_link.port()
rear_link = rear.RearLink(
'localhost', port, self.rear_link_pool,
{test_method: scenario.serialize_request},
{test_method: scenario.deserialize_response}, False, None, None, None)
rear_link.join_fore_link(test_fore_link)
test_fore_link.join_rear_link(rear_link)
rear_link.start()
commencement_ticket = interfaces.FrontToBackTicket(
test_operation_id, 0,
interfaces.FrontToBackTicket.Kind.COMMENCEMENT, test_method,
interfaces.ServicedSubscription.Kind.FULL, None, None,
_TIMEOUT)
fore_sequence_number = 1
rear_link.accept_front_to_back_ticket(commencement_ticket)
for request in scenario.requests():
continuation_ticket = interfaces.FrontToBackTicket(
test_operation_id, fore_sequence_number,
interfaces.FrontToBackTicket.Kind.CONTINUATION, None, None, None,
request, None)
fore_sequence_number += 1
rear_link.accept_front_to_back_ticket(continuation_ticket)
completion_ticket = interfaces.FrontToBackTicket(
test_operation_id, fore_sequence_number,
interfaces.FrontToBackTicket.Kind.COMPLETION, None, None, None, None,
None)
fore_sequence_number += 1
rear_link.accept_front_to_back_ticket(completion_ticket)
with test_fore_link.condition:
while (not test_fore_link.tickets or
test_fore_link.tickets[-1].kind is not
interfaces.BackToFrontTicket.Kind.COMPLETION):
test_fore_link.condition.wait()
rear_link.stop()
fore_link.stop()
with test_rear_link.condition:
requests = tuple(
ticket.payload for ticket in test_rear_link.tickets
if ticket.payload is not None)
with test_fore_link.condition:
responses = tuple(
ticket.payload for ticket in test_fore_link.tickets
if ticket.payload is not None)
self.assertTrue(scenario.verify_requests(requests))
self.assertTrue(scenario.verify_responses(responses))
def testEmptyScenario(self):
self._perform_scenario_test(_proto_scenarios.EmptyScenario())
def testBidirectionallyUnaryScenario(self):
self._perform_scenario_test(_proto_scenarios.BidirectionallyUnaryScenario())
def testBidirectionallyStreamingScenario(self):
self._perform_scenario_test(
_proto_scenarios.BidirectionallyStreamingScenario())
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
andresriancho/trunserver | trunserv/management/commands/trunserver.py | 2 | 3573 | from django.core.management.base import BaseCommand, CommandError
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core.servers.basehttp import get_internal_wsgi_application
#from django.utils import autoreload
from trunserv import autoreload
from twisted.application import internet, service, app
from twisted.web import server, resource, wsgi, static
from twisted.python import threadpool, log
from twisted.internet import reactor
from optparse import make_option
import sys
import os
import re
naiveip_re = re.compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X)
DEFAULT_PORT = "8000"
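# Strings the pattern above accepts (illustrative): "8000", "0.0.0.0:8000",
# "[::1]:8000", "localhost:8000".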
class Root(resource.Resource):
def __init__(self, wsgi_resource):
resource.Resource.__init__(self)
self.wsgi_resource = wsgi_resource
def getChild(self, path, request):
path0 = request.prepath.pop(0)
request.postpath.insert(0, path0)
return self.wsgi_resource
def wsgi_resource():
pool = threadpool.ThreadPool()
pool.start()
# Allow Ctrl-C to get you out cleanly:
reactor.addSystemEventTrigger('after', 'shutdown', pool.stop)
handler = StaticFilesHandler(get_internal_wsgi_application())
wsgi_resource = wsgi.WSGIResource(reactor, pool, handler)
return wsgi_resource
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noreload', action='store_false', dest='use_reloader',
default=True, help='Do NOT use the auto-reloader.'),
)
help = "Starts a Twisted Web server for development."
args = '[optional port number, or ipaddr:port]'
# Validation is called explicitly each time the server is reloaded.
requires_model_validation = False
def handle(self, addrport='', *args, **options):
if not addrport:
self.addr = ''
self.port = DEFAULT_PORT
else:
m = re.match(naiveip_re, addrport)
if m is None:
raise CommandError('"%s" is not a valid port number '
'or address:port pair.' % addrport)
self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
if not self.port.isdigit():
raise CommandError("%r is not a valid port." % self.port)
if not self.addr:
self.addr = '127.0.0.1'
self.run(*args, **options)
def run(self, *args, **options):
use_reloader = options.get('use_reloader', True)
def _inner_run():
# Initialize logging
log.startLogging(sys.stdout)
# Setup Twisted application
application = service.Application('django')
wsgi_root = wsgi_resource()
root = Root(wsgi_root)
main_site = server.Site(root)
internet.TCPServer(int(self.port), main_site
).setServiceParent(application)
service.IService(application).startService()
app.startApplication(application, False)
reactor.addSystemEventTrigger('before', 'shutdown',
service.IService(application).stopService)
reactor.run()
if use_reloader:
try:
autoreload.main(_inner_run)
except TypeError:
# autoreload was in the middle of something
pass
else:
_inner_run()
| mit |
openstack/cinder | cinder/tests/unit/volume/drivers/netapp/dataontap/performance/test_perf_cmode.py | 2 | 19560 | # Copyright (c) 2016 Clinton Knight
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import ddt
from cinder.tests.unit import test
from cinder.tests.unit.volume.drivers.netapp.dataontap.performance \
import fakes as fake
from cinder.volume.drivers.netapp.dataontap.client import api as netapp_api
from cinder.volume.drivers.netapp.dataontap.performance import perf_base
from cinder.volume.drivers.netapp.dataontap.performance import perf_cmode
@ddt.ddt
class PerformanceCmodeLibraryTestCase(test.TestCase):
def setUp(self):
super(PerformanceCmodeLibraryTestCase, self).setUp()
with mock.patch.object(perf_cmode.PerformanceCmodeLibrary,
'_init_counter_info'):
self.zapi_client = mock.Mock()
self.perf_library = perf_cmode.PerformanceCmodeLibrary(
self.zapi_client)
self.perf_library.system_object_name = 'system'
self.perf_library.avg_processor_busy_base_counter_name = (
'cpu_elapsed_time1')
self._set_up_fake_pools()
def _set_up_fake_pools(self):
self.fake_volumes = {
'pool1': {
'netapp_aggregate': 'aggr1',
},
'pool2': {
'netapp_aggregate': 'aggr2',
},
'pool3': {
'netapp_aggregate': 'aggr2',
},
'pool4': {
'netapp_aggregate': ['aggr1', 'aggr2'],
}
}
self.fake_aggrs = set(['aggr1', 'aggr2', 'aggr3'])
self.fake_nodes = set(['node1', 'node2'])
self.fake_aggr_node_map = {
'aggr1': 'node1',
'aggr2': 'node2',
'aggr3': 'node2',
}
def test_init_counter_info_not_supported(self):
self.zapi_client.features.SYSTEM_METRICS = False
self.zapi_client.features.SYSTEM_CONSTITUENT_METRICS = False
mock_get_base_counter_name = self.mock_object(
self.perf_library, '_get_base_counter_name')
self.perf_library._init_counter_info()
self.assertIsNone(self.perf_library.system_object_name)
self.assertIsNone(
self.perf_library.avg_processor_busy_base_counter_name)
self.assertFalse(mock_get_base_counter_name.called)
@ddt.data({
'system_constituent': False,
'base_counter': 'cpu_elapsed_time1',
}, {
'system_constituent': True,
'base_counter': 'cpu_elapsed_time',
})
@ddt.unpack
def test_init_counter_info_api_error(self, system_constituent,
base_counter):
self.zapi_client.features.SYSTEM_METRICS = True
self.zapi_client.features.SYSTEM_CONSTITUENT_METRICS = (
system_constituent)
self.mock_object(self.perf_library,
'_get_base_counter_name',
side_effect=netapp_api.NaApiError)
self.perf_library._init_counter_info()
self.assertEqual(
base_counter,
self.perf_library.avg_processor_busy_base_counter_name)
def test_init_counter_info_system(self):
self.zapi_client.features.SYSTEM_METRICS = True
self.zapi_client.features.SYSTEM_CONSTITUENT_METRICS = False
mock_get_base_counter_name = self.mock_object(
self.perf_library, '_get_base_counter_name',
return_value='cpu_elapsed_time1')
self.perf_library._init_counter_info()
self.assertEqual('system', self.perf_library.system_object_name)
self.assertEqual(
'cpu_elapsed_time1',
self.perf_library.avg_processor_busy_base_counter_name)
mock_get_base_counter_name.assert_called_once_with(
'system', 'avg_processor_busy')
def test_init_counter_info_system_constituent(self):
self.zapi_client.features.SYSTEM_METRICS = False
self.zapi_client.features.SYSTEM_CONSTITUENT_METRICS = True
mock_get_base_counter_name = self.mock_object(
self.perf_library, '_get_base_counter_name',
return_value='cpu_elapsed_time')
self.perf_library._init_counter_info()
self.assertEqual('system:constituent',
self.perf_library.system_object_name)
self.assertEqual(
'cpu_elapsed_time',
self.perf_library.avg_processor_busy_base_counter_name)
mock_get_base_counter_name.assert_called_once_with(
'system:constituent', 'avg_processor_busy')
@test.testtools.skip("launchpad bug 1715915")
def test_update_performance_cache(self):
self.perf_library.performance_counters = {
'node1': list(range(11, 21)),
'node2': list(range(21, 31)),
}
mock_get_aggregates_for_pools = self.mock_object(
self.perf_library, '_get_aggregates_for_pools',
return_value=self.fake_aggrs)
mock_get_nodes_for_aggregates = self.mock_object(
self.perf_library, '_get_nodes_for_aggregates',
return_value=(self.fake_nodes, self.fake_aggr_node_map))
mock_get_node_utilization_counters = self.mock_object(
self.perf_library, '_get_node_utilization_counters',
side_effect=[21, 31])
mock_get_node_utilization = self.mock_object(
self.perf_library, '_get_node_utilization', side_effect=[25, 75])
self.perf_library.update_performance_cache(self.fake_volumes)
expected_performance_counters = {
'node1': list(range(12, 22)),
'node2': list(range(22, 32)),
}
self.assertEqual(expected_performance_counters,
self.perf_library.performance_counters)
expected_pool_utilization = {'pool1': 25, 'pool2': 75, 'pool3': 75,
'pool4': perf_base.DEFAULT_UTILIZATION}
self.assertEqual(expected_pool_utilization,
self.perf_library.pool_utilization)
mock_get_aggregates_for_pools.assert_called_once_with(
self.fake_volumes)
mock_get_nodes_for_aggregates.assert_called_once_with(self.fake_aggrs)
mock_get_node_utilization_counters.assert_has_calls([
mock.call('node1'), mock.call('node2')])
mock_get_node_utilization.assert_has_calls([
mock.call(12, 21, 'node1'), mock.call(22, 31, 'node2')])
@test.testtools.skip("launchpad bug #1715915")
def test_update_performance_cache_first_pass(self):
mock_get_aggregates_for_pools = self.mock_object(
self.perf_library, '_get_aggregates_for_pools',
return_value=self.fake_aggrs)
mock_get_nodes_for_aggregates = self.mock_object(
self.perf_library, '_get_nodes_for_aggregates',
return_value=(self.fake_nodes, self.fake_aggr_node_map))
mock_get_node_utilization_counters = self.mock_object(
self.perf_library, '_get_node_utilization_counters',
side_effect=[11, 21])
mock_get_node_utilization = self.mock_object(
self.perf_library, '_get_node_utilization', side_effect=[25, 75])
self.perf_library.update_performance_cache(self.fake_volumes)
expected_performance_counters = {'node1': [11], 'node2': [21]}
self.assertEqual(expected_performance_counters,
self.perf_library.performance_counters)
expected_pool_utilization = {
'pool1': perf_base.DEFAULT_UTILIZATION,
'pool2': perf_base.DEFAULT_UTILIZATION,
'pool3': perf_base.DEFAULT_UTILIZATION,
'pool4': perf_base.DEFAULT_UTILIZATION,
}
self.assertEqual(expected_pool_utilization,
self.perf_library.pool_utilization)
mock_get_aggregates_for_pools.assert_called_once_with(
self.fake_volumes)
mock_get_nodes_for_aggregates.assert_called_once_with(self.fake_aggrs)
mock_get_node_utilization_counters.assert_has_calls([
mock.call('node1'), mock.call('node2')])
self.assertFalse(mock_get_node_utilization.called)
def test_update_performance_cache_unknown_nodes(self):
self.perf_library.performance_counters = {
'node1': range(11, 21),
'node2': range(21, 31),
}
mock_get_aggregates_for_pools = self.mock_object(
self.perf_library, '_get_aggregates_for_pools',
return_value=self.fake_aggrs)
mock_get_nodes_for_aggregates = self.mock_object(
self.perf_library, '_get_nodes_for_aggregates',
return_value=(set(), {}))
mock_get_node_utilization_counters = self.mock_object(
self.perf_library, '_get_node_utilization_counters',
side_effect=[11, 21])
mock_get_node_utilization = self.mock_object(
self.perf_library, '_get_node_utilization', side_effect=[25, 75])
self.perf_library.update_performance_cache(self.fake_volumes)
expected_performance_counters = {
'node1': range(11, 21),
'node2': range(21, 31),
}
self.assertEqual(expected_performance_counters,
self.perf_library.performance_counters)
expected_pool_utilization = {
'pool1': perf_base.DEFAULT_UTILIZATION,
'pool2': perf_base.DEFAULT_UTILIZATION,
'pool3': perf_base.DEFAULT_UTILIZATION,
'pool4': perf_base.DEFAULT_UTILIZATION,
}
self.assertEqual(expected_pool_utilization,
self.perf_library.pool_utilization)
mock_get_aggregates_for_pools.assert_called_once_with(
self.fake_volumes)
mock_get_nodes_for_aggregates.assert_called_once_with(self.fake_aggrs)
self.assertFalse(mock_get_node_utilization_counters.called)
self.assertFalse(mock_get_node_utilization.called)
def test_update_performance_cache_counters_unavailable(self):
self.perf_library.performance_counters = {
'node1': range(11, 21),
'node2': range(21, 31),
}
mock_get_aggregates_for_pools = self.mock_object(
self.perf_library, '_get_aggregates_for_pools',
return_value=self.fake_aggrs)
mock_get_nodes_for_aggregates = self.mock_object(
self.perf_library, '_get_nodes_for_aggregates',
return_value=(self.fake_nodes, self.fake_aggr_node_map))
mock_get_node_utilization_counters = self.mock_object(
self.perf_library, '_get_node_utilization_counters',
side_effect=[None, None])
mock_get_node_utilization = self.mock_object(
self.perf_library, '_get_node_utilization', side_effect=[25, 75])
self.perf_library.update_performance_cache(self.fake_volumes)
expected_performance_counters = {
'node1': range(11, 21),
'node2': range(21, 31),
}
self.assertEqual(expected_performance_counters,
self.perf_library.performance_counters)
expected_pool_utilization = {
'pool1': perf_base.DEFAULT_UTILIZATION,
'pool2': perf_base.DEFAULT_UTILIZATION,
'pool3': perf_base.DEFAULT_UTILIZATION,
'pool4': perf_base.DEFAULT_UTILIZATION,
}
self.assertEqual(expected_pool_utilization,
self.perf_library.pool_utilization)
mock_get_aggregates_for_pools.assert_called_once_with(
self.fake_volumes)
mock_get_nodes_for_aggregates.assert_called_once_with(self.fake_aggrs)
mock_get_node_utilization_counters.assert_has_calls([
mock.call('node1'), mock.call('node2')],
any_order=True)
self.assertFalse(mock_get_node_utilization.called)
def test_update_performance_cache_not_supported(self):
self.zapi_client.features.SYSTEM_METRICS = False
self.zapi_client.features.SYSTEM_CONSTITUENT_METRICS = False
mock_get_aggregates_for_pools = self.mock_object(
self.perf_library, '_get_aggregates_for_pools')
self.perf_library.update_performance_cache(self.fake_volumes)
expected_performance_counters = {}
self.assertEqual(expected_performance_counters,
self.perf_library.performance_counters)
expected_pool_utilization = {}
self.assertEqual(expected_pool_utilization,
self.perf_library.pool_utilization)
self.assertFalse(mock_get_aggregates_for_pools.called)
@ddt.data({'pool': 'pool1', 'expected': 10.0},
{'pool': 'pool3', 'expected': perf_base.DEFAULT_UTILIZATION})
@ddt.unpack
def test_get_node_utilization_for_pool(self, pool, expected):
self.perf_library.pool_utilization = {'pool1': 10.0, 'pool2': 15.0}
result = self.perf_library.get_node_utilization_for_pool(pool)
self.assertAlmostEqual(expected, result)
def test__update_for_failover(self):
self.mock_object(self.perf_library, 'update_performance_cache')
mock_client = mock.Mock(name='FAKE_ZAPI_CLIENT')
self.perf_library._update_for_failover(mock_client, self.fake_volumes)
self.assertEqual(mock_client, self.perf_library.zapi_client)
self.perf_library.update_performance_cache.assert_called_once_with(
self.fake_volumes)
def test_get_aggregates_for_pools(self):
result = self.perf_library._get_aggregates_for_pools(self.fake_volumes)
expected_aggregate_names = set(['aggr1', 'aggr2'])
self.assertEqual(expected_aggregate_names, result)
def test_get_nodes_for_aggregates(self):
aggregate_names = ['aggr1', 'aggr2', 'aggr3']
aggregate_nodes = ['node1', 'node2', 'node2']
mock_get_node_for_aggregate = self.mock_object(
self.zapi_client, 'get_node_for_aggregate',
side_effect=aggregate_nodes)
result = self.perf_library._get_nodes_for_aggregates(aggregate_names)
self.assertEqual(2, len(result))
result_node_names, result_aggr_node_map = result
expected_node_names = set(['node1', 'node2'])
expected_aggr_node_map = dict(zip(aggregate_names, aggregate_nodes))
self.assertEqual(expected_node_names, result_node_names)
self.assertEqual(expected_aggr_node_map, result_aggr_node_map)
mock_get_node_for_aggregate.assert_has_calls([
mock.call('aggr1'), mock.call('aggr2'), mock.call('aggr3')])
def test_get_node_utilization_counters(self):
mock_get_node_utilization_system_counters = self.mock_object(
self.perf_library, '_get_node_utilization_system_counters',
return_value=['A', 'B', 'C'])
mock_get_node_utilization_wafl_counters = self.mock_object(
self.perf_library, '_get_node_utilization_wafl_counters',
return_value=['D', 'E', 'F'])
mock_get_node_utilization_processor_counters = self.mock_object(
self.perf_library, '_get_node_utilization_processor_counters',
return_value=['G', 'H', 'I'])
result = self.perf_library._get_node_utilization_counters(fake.NODE)
expected = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I']
self.assertEqual(expected, result)
mock_get_node_utilization_system_counters.assert_called_once_with(
fake.NODE)
mock_get_node_utilization_wafl_counters.assert_called_once_with(
fake.NODE)
mock_get_node_utilization_processor_counters.assert_called_once_with(
fake.NODE)
def test_get_node_utilization_counters_api_error(self):
self.mock_object(self.perf_library,
'_get_node_utilization_system_counters',
side_effect=netapp_api.NaApiError)
result = self.perf_library._get_node_utilization_counters(fake.NODE)
self.assertIsNone(result)
def test_get_node_utilization_system_counters(self):
mock_get_performance_instance_uuids = self.mock_object(
self.zapi_client, 'get_performance_instance_uuids',
return_value=fake.SYSTEM_INSTANCE_UUIDS)
mock_get_performance_counters = self.mock_object(
self.zapi_client, 'get_performance_counters',
return_value=fake.SYSTEM_COUNTERS)
result = self.perf_library._get_node_utilization_system_counters(
fake.NODE)
self.assertEqual(fake.SYSTEM_COUNTERS, result)
mock_get_performance_instance_uuids.assert_called_once_with(
'system', fake.NODE)
mock_get_performance_counters.assert_called_once_with(
'system', fake.SYSTEM_INSTANCE_UUIDS,
['avg_processor_busy', 'cpu_elapsed_time1', 'cpu_elapsed_time'])
def test_get_node_utilization_wafl_counters(self):
mock_get_performance_instance_uuids = self.mock_object(
self.zapi_client, 'get_performance_instance_uuids',
return_value=fake.WAFL_INSTANCE_UUIDS)
mock_get_performance_counters = self.mock_object(
self.zapi_client, 'get_performance_counters',
return_value=fake.WAFL_COUNTERS)
mock_get_performance_counter_info = self.mock_object(
self.zapi_client, 'get_performance_counter_info',
return_value=fake.WAFL_CP_PHASE_TIMES_COUNTER_INFO)
result = self.perf_library._get_node_utilization_wafl_counters(
fake.NODE)
self.assertEqual(fake.EXPANDED_WAFL_COUNTERS, result)
mock_get_performance_instance_uuids.assert_called_once_with(
'wafl', fake.NODE)
mock_get_performance_counters.assert_called_once_with(
'wafl', fake.WAFL_INSTANCE_UUIDS,
['total_cp_msecs', 'cp_phase_times'])
mock_get_performance_counter_info.assert_called_once_with(
'wafl', 'cp_phase_times')
def test_get_node_utilization_processor_counters(self):
mock_get_performance_instance_uuids = self.mock_object(
self.zapi_client, 'get_performance_instance_uuids',
return_value=fake.PROCESSOR_INSTANCE_UUIDS)
mock_get_performance_counters = self.mock_object(
self.zapi_client, 'get_performance_counters',
return_value=fake.PROCESSOR_COUNTERS)
self.mock_object(
self.zapi_client, 'get_performance_counter_info',
return_value=fake.PROCESSOR_DOMAIN_BUSY_COUNTER_INFO)
result = self.perf_library._get_node_utilization_processor_counters(
fake.NODE)
self.assertEqual(fake.EXPANDED_PROCESSOR_COUNTERS, result)
mock_get_performance_instance_uuids.assert_called_once_with(
'processor', fake.NODE)
mock_get_performance_counters.assert_called_once_with(
'processor', fake.PROCESSOR_INSTANCE_UUIDS,
['domain_busy', 'processor_elapsed_time'])
| apache-2.0 |
Nepherhotep/django | tests/null_fk_ordering/models.py | 210 | 1605 | """
Regression tests for proper working of ForeignKey(null=True). Tests these bugs:
* #7512: including a nullable foreign key reference in Meta ordering has unexpected results
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# The first two models represent a very simple null FK ordering case.
class Author(models.Model):
name = models.CharField(max_length=150)
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField(max_length=150)
author = models.ForeignKey(Author, models.SET_NULL, null=True)
def __str__(self):
return 'Article titled: %s' % (self.title, )
class Meta:
ordering = ['author__name', ]
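# Illustrative sketch (hypothetical data): with the nullable FK ordering
# above, an author-less article must still be returned (see #7512), e.g.
#   Article.objects.create(title='a1', author=None)
#   list(Article.objects.all())   # ordered by author__name, 'a1' included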
# The following four models represent a far more complex ordering case.
class SystemInfo(models.Model):
system_name = models.CharField(max_length=32)
class Forum(models.Model):
system_info = models.ForeignKey(SystemInfo, models.CASCADE)
forum_name = models.CharField(max_length=32)
@python_2_unicode_compatible
class Post(models.Model):
forum = models.ForeignKey(Forum, models.SET_NULL, null=True)
title = models.CharField(max_length=32)
def __str__(self):
return self.title
@python_2_unicode_compatible
class Comment(models.Model):
post = models.ForeignKey(Post, models.SET_NULL, null=True)
comment_text = models.CharField(max_length=250)
class Meta:
ordering = ['post__forum__system_info__system_name', 'comment_text']
def __str__(self):
return self.comment_text
| bsd-3-clause |
nfletton/django-oscar | tests/unit/shipping/method_tests.py | 44 | 2514 | from decimal import Decimal as D
from django.test import TestCase
import mock
from oscar.apps.shipping import methods
from oscar.apps.basket.models import Basket
class TestFreeShipppingForEmptyBasket(TestCase):
def setUp(self):
self.method = methods.Free()
self.basket = Basket()
self.charge = self.method.calculate(self.basket)
def test_is_free(self):
self.assertEqual(D('0.00'), self.charge.incl_tax)
self.assertEqual(D('0.00'), self.charge.excl_tax)
def test_has_tax_known(self):
self.assertTrue(self.charge.is_tax_known)
def test_has_same_currency_as_basket(self):
self.assertEqual(self.basket.currency, self.charge.currency)
class TestFreeShipppingForNonEmptyBasket(TestCase):
def setUp(self):
self.method = methods.Free()
self.basket = mock.Mock()
self.basket.num_items = 1
self.charge = self.method.calculate(self.basket)
def test_is_free(self):
self.assertEqual(D('0.00'), self.charge.incl_tax)
self.assertEqual(D('0.00'), self.charge.excl_tax)
class TestNoShippingRequired(TestCase):
def setUp(self):
self.method = methods.NoShippingRequired()
basket = Basket()
self.charge = self.method.calculate(basket)
def test_is_free_for_empty_basket(self):
self.assertEqual(D('0.00'), self.charge.incl_tax)
self.assertEqual(D('0.00'), self.charge.excl_tax)
def test_has_a_different_code_to_free(self):
self.assertTrue(methods.NoShippingRequired.code !=
methods.Free.code)
class TestFixedPriceShippingWithoutTax(TestCase):
def setUp(self):
self.method = methods.FixedPrice(D('10.00'))
basket = Basket()
self.charge = self.method.calculate(basket)
def test_has_correct_charge(self):
self.assertEqual(D('10.00'), self.charge.excl_tax)
def test_does_not_include_tax(self):
self.assertFalse(self.charge.is_tax_known)
class TestFixedPriceShippingWithTax(TestCase):
def setUp(self):
self.method = methods.FixedPrice(
charge_excl_tax=D('10.00'),
charge_incl_tax=D('12.00'))
basket = Basket()
self.charge = self.method.calculate(basket)
def test_has_correct_charge(self):
self.assertEqual(D('10.00'), self.charge.excl_tax)
self.assertEqual(D('12.00'), self.charge.incl_tax)
def test_does_include_tax(self):
self.assertTrue(self.charge.is_tax_known)
| bsd-3-clause |
kayhayen/Nuitka | tests/benchmarks/constructs/OperationIntegerAdd.py | 1 | 1297 | # Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5000
module_value2 = 3000
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
local_value = module_value1
s = module_value1
t = module_value2
# construct_begin
t = s + t
# construct_end
return s, t, local_value
import itertools
for x in itertools.repeat(None, 50000):
calledRepeatedly()
print("OK.")
| apache-2.0 |
Saint-Joe/weewx | bin/weewx/drivers/ws1.py | 4 | 9124 | #!/usr/bin/env python
#
# Copyright 2014 Matthew Wall
# See the file LICENSE.txt for your rights.
"""Driver for ADS WS1 weather stations.
Thanks to Steve (sesykes71) for the testing that made this driver possible.
Thanks to Jay Nugent (WB8TKL) and KRK6 for weather-2.kr6k-V2.1
http://server1.nuge.com/~weather/
"""
from __future__ import with_statement
import serial
import syslog
import time
import weewx.drivers
DRIVER_NAME = 'WS1'
DRIVER_VERSION = '0.19'
def loader(config_dict, _):
return WS1Driver(**config_dict[DRIVER_NAME])
def confeditor_loader():
return WS1ConfEditor()
INHG_PER_MBAR = 0.0295333727
METER_PER_FOOT = 0.3048
MILE_PER_KM = 0.621371
DEFAULT_PORT = '/dev/ttyS0'
DEBUG_READ = 0
def logmsg(level, msg):
syslog.syslog(level, 'ws1: %s' % msg)
def logdbg(msg):
logmsg(syslog.LOG_DEBUG, msg)
def loginf(msg):
logmsg(syslog.LOG_INFO, msg)
def logerr(msg):
logmsg(syslog.LOG_ERR, msg)
class WS1Driver(weewx.drivers.AbstractDevice):
"""weewx driver that communicates with an ADS-WS1 station
port - serial port
[Required. Default is /dev/ttyS0]
max_tries - how often to retry serial communication before giving up
[Optional. Default is 5]
retry_wait - how long to wait, in seconds, before retrying after a failure
[Optional. Default is 10]
"""
def __init__(self, **stn_dict):
self.port = stn_dict.get('port', DEFAULT_PORT)
self.max_tries = int(stn_dict.get('max_tries', 5))
self.retry_wait = int(stn_dict.get('retry_wait', 10))
self.last_rain = None
loginf('driver version is %s' % DRIVER_VERSION)
loginf('using serial port %s' % self.port)
global DEBUG_READ
DEBUG_READ = int(stn_dict.get('debug_read', DEBUG_READ))
self.station = Station(self.port)
self.station.open()
def closePort(self):
if self.station is not None:
self.station.close()
self.station = None
@property
def hardware_name(self):
return "WS1"
def genLoopPackets(self):
while True:
packet = {'dateTime': int(time.time() + 0.5),
'usUnits': weewx.US}
readings = self.station.get_readings_with_retry(self.max_tries,
self.retry_wait)
data = Station.parse_readings(readings)
packet.update(data)
self._augment_packet(packet)
yield packet
def _augment_packet(self, packet):
# calculate the rain delta from rain total
if self.last_rain is not None:
packet['rain'] = packet['long_term_rain'] - self.last_rain
else:
packet['rain'] = None
self.last_rain = packet['long_term_rain']
# no wind direction when wind speed is zero
if 'windSpeed' in packet and not packet['windSpeed']:
packet['windDir'] = None
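    # Worked example (values assumed for illustration): if the previous poll
    # reported long_term_rain 12.30 and this poll reports 12.34, the delta
    # stored in packet['rain'] is 0.04 inch; on the very first poll it is None.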
class Station(object):
def __init__(self, port):
self.port = port
self.baudrate = 2400
self.timeout = 3
self.serial_port = None
def __enter__(self):
self.open()
return self
def __exit__(self, _, value, traceback):
self.close()
def open(self):
logdbg("open serial port %s" % self.port)
self.serial_port = serial.Serial(self.port, self.baudrate,
timeout=self.timeout)
def close(self):
if self.serial_port is not None:
logdbg("close serial port %s" % self.port)
self.serial_port.close()
self.serial_port = None
# FIXME: use either CR or LF as line terminator. apparently some ws1
# hardware occasionally ends a line with only CR instead of the standard
# CR-LF, resulting in a line that is too long.
def get_readings(self):
buf = self.serial_port.readline()
if DEBUG_READ:
logdbg("bytes: '%s'" % ' '.join(["%0.2X" % ord(c) for c in buf]))
buf = buf.strip()
return buf
def get_readings_with_retry(self, max_tries=5, retry_wait=10):
for ntries in range(0, max_tries):
try:
buf = self.get_readings()
Station.validate_string(buf)
return buf
except (serial.serialutil.SerialException, weewx.WeeWxIOError), e:
loginf("Failed attempt %d of %d to get readings: %s" %
(ntries + 1, max_tries, e))
time.sleep(retry_wait)
else:
msg = "Max retries (%d) exceeded for readings" % max_tries
logerr(msg)
raise weewx.RetriesExceeded(msg)
@staticmethod
def validate_string(buf):
if len(buf) != 50:
raise weewx.WeeWxIOError("Unexpected buffer length %d" % len(buf))
if buf[0:2] != '!!':
raise weewx.WeeWxIOError("Unexpected header bytes '%s'" % buf[0:2])
return buf
@staticmethod
def parse_readings(raw):
"""WS1 station emits data in PeetBros format:
http://www.peetbros.com/shop/custom.aspx?recid=29
Each line has 50 characters - 2 header bytes and 48 data bytes:
!!000000BE02EB000027700000023A023A0025005800000000
SSSSXXDDTTTTLLLLPPPPttttHHHHhhhhddddmmmmRRRRWWWW
SSSS - wind speed (0.1 kph)
XX - wind direction calibration
DD - wind direction (0-255)
TTTT - outdoor temperature (0.1 F)
LLLL - long term rain (0.01 in)
PPPP - pressure (0.1 mbar)
tttt - indoor temperature (0.1 F)
HHHH - outdoor humidity (0.1 %)
hhhh - indoor humidity (0.1 %)
dddd - date (day of year)
mmmm - time (minute of day)
RRRR - daily rain (0.01 in)
WWWW - one minute wind average (0.1 kph)
"""
# FIXME: peetbros could be 40 bytes or 44 bytes, what about ws1?
# FIXME: peetbros uses two's complement for temp, what about ws1?
# FIXME: for ws1 is the pressure reading 'pressure' or 'barometer'?
buf = raw[2:]
data = dict()
data['windSpeed'] = Station._decode(buf[0:4], 0.1 * MILE_PER_KM) # mph
data['windDir'] = Station._decode(buf[6:8], 1.411764) # compass deg
data['outTemp'] = Station._decode(buf[8:12], 0.1) # degree_F
data['long_term_rain'] = Station._decode(buf[12:16], 0.01) # inch
data['pressure'] = Station._decode(buf[16:20], 0.1 * INHG_PER_MBAR) # inHg
data['inTemp'] = Station._decode(buf[20:24], 0.1) # degree_F
data['outHumidity'] = Station._decode(buf[24:28], 0.1) # percent
data['inHumidity'] = Station._decode(buf[28:32], 0.1) # percent
data['day_of_year'] = Station._decode(buf[32:36])
data['minute_of_day'] = Station._decode(buf[36:40])
data['daily_rain'] = Station._decode(buf[40:44], 0.01) # inch
data['wind_average'] = Station._decode(buf[44:48], 0.1 * MILE_PER_KM) # mph
return data
@staticmethod
def _decode(s, multiplier=None, neg=False):
v = None
try:
v = int(s, 16)
if neg:
bits = 4 * len(s)
if v & (1 << (bits - 1)) != 0:
v -= (1 << bits)
if multiplier is not None:
v *= multiplier
except ValueError, e:
if s != '----':
logdbg("decode failed for '%s': %s" % (s, e))
return v
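# Minimal sketch (not part of the driver): decode the documented sample
# sentence from the parse_readings docstring. The derived values in the
# comments are worked by hand from the multipliers above.
def _example_parse_sample():
    sample = '!!000000BE02EB000027700000023A023A0025005800000000'
    Station.validate_string(sample)        # 50 chars, '!!' header
    data = Station.parse_readings(sample)
    # outTemp: 0x02EB = 747 -> 74.7 degrees F
    # outHumidity: 0x023A = 570 -> 57.0 percent
    # pressure: 0x2770 = 10096 -> 1009.6 mbar -> ~29.82 inHg
    return data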
class WS1ConfEditor(weewx.drivers.AbstractConfEditor):
@property
def default_stanza(self):
return """
[WS1]
# This section is for the ADS WS1 series of weather stations.
# Serial port such as /dev/ttyS0, /dev/ttyUSB0, or /dev/cuaU0
port = /dev/ttyUSB0
# The driver to use:
driver = weewx.drivers.ws1
"""
def prompt_for_settings(self):
print "Specify the serial port on which the station is connected, for"
print "example /dev/ttyUSB0 or /dev/ttyS0."
port = self._prompt('port', '/dev/ttyUSB0')
return {'port': port}
# define a main entry point for basic testing of the station without weewx
# engine and service overhead. invoke this as follows from the weewx root dir:
#
# PYTHONPATH=bin python bin/weewx/drivers/ws1.py
if __name__ == '__main__':
import optparse
usage = """%prog [options] [--help]"""
syslog.openlog('ws1', syslog.LOG_PID | syslog.LOG_CONS)
syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
parser = optparse.OptionParser(usage=usage)
parser.add_option('--version', dest='version', action='store_true',
help='display driver version')
parser.add_option('--port', dest='port', metavar='PORT',
help='serial port to which the station is connected',
default=DEFAULT_PORT)
(options, args) = parser.parse_args()
if options.version:
print "ADS WS1 driver version %s" % DRIVER_VERSION
exit(0)
with Station(options.port) as s:
while True:
print time.time(), s.get_readings()
| gpl-3.0 |
codeworldprodigy/lab4 | lib/jinja2/jinja2/testsuite/__init__.py | 404 | 4641 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite
~~~~~~~~~~~~~~~~
All the unittests of Jinja2. These tests can be executed by
either running run-tests.py using multiple Python versions at
the same time.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import re
import sys
import unittest
from traceback import format_exception
from jinja2 import loaders
from jinja2._compat import PY2
here = os.path.dirname(os.path.abspath(__file__))
dict_loader = loaders.DictLoader({
'justdict.html': 'FOO'
})
package_loader = loaders.PackageLoader('jinja2.testsuite.res', 'templates')
filesystem_loader = loaders.FileSystemLoader(here + '/res/templates')
function_loader = loaders.FunctionLoader({'justfunction.html': 'FOO'}.get)
choice_loader = loaders.ChoiceLoader([dict_loader, package_loader])
prefix_loader = loaders.PrefixLoader({
'a': filesystem_loader,
'b': dict_loader
})
class JinjaTestCase(unittest.TestCase):
### use only these methods for testing. If you need standard
### unittest method, wrap them!
def setup(self):
pass
def teardown(self):
pass
def setUp(self):
self.setup()
def tearDown(self):
self.teardown()
def assert_equal(self, a, b):
return self.assertEqual(a, b)
def assert_raises(self, *args, **kwargs):
return self.assertRaises(*args, **kwargs)
def assert_traceback_matches(self, callback, expected_tb):
try:
callback()
except Exception as e:
tb = format_exception(*sys.exc_info())
if re.search(expected_tb.strip(), ''.join(tb)) is None:
raise self.fail('Traceback did not match:\n\n%s\nexpected:\n%s'
% (''.join(tb), expected_tb))
else:
self.fail('Expected exception')
def find_all_tests(suite):
"""Yields all the tests and their names from a given suite."""
suites = [suite]
while suites:
s = suites.pop()
try:
suites.extend(s)
except TypeError:
yield s, '%s.%s.%s' % (
s.__class__.__module__,
s.__class__.__name__,
s._testMethodName
)
class BetterLoader(unittest.TestLoader):
"""A nicer loader that solves two problems. First of all we are setting
up tests from different sources and we're doing this programmatically
which breaks the default loading logic so this is required anyways.
Secondly this loader has a nicer interpolation for test names than the
default one so you can just do ``run-tests.py ViewTestCase`` and it
will work.
"""
def getRootSuite(self):
return suite()
def loadTestsFromName(self, name, module=None):
root = self.getRootSuite()
if name == 'suite':
return root
all_tests = []
for testcase, testname in find_all_tests(root):
if testname == name or \
testname.endswith('.' + name) or \
('.' + name + '.') in testname or \
testname.startswith(name + '.'):
all_tests.append(testcase)
if not all_tests:
raise LookupError('could not find test case for "%s"' % name)
if len(all_tests) == 1:
return all_tests[0]
rv = unittest.TestSuite()
for test in all_tests:
rv.addTest(test)
return rv
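# Illustrative note: the partial-name matching above means invocations like
#   run-tests.py FilterTestCase
#   run-tests.py filters.FilterTestCase
# (class names assumed) resolve to the same tests without the full dotted path.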
def suite():
from jinja2.testsuite import ext, filters, tests, core_tags, \
loader, inheritance, imports, lexnparse, security, api, \
regression, debug, utils, bytecode_cache, doctests
suite = unittest.TestSuite()
suite.addTest(ext.suite())
suite.addTest(filters.suite())
suite.addTest(tests.suite())
suite.addTest(core_tags.suite())
suite.addTest(loader.suite())
suite.addTest(inheritance.suite())
suite.addTest(imports.suite())
suite.addTest(lexnparse.suite())
suite.addTest(security.suite())
suite.addTest(api.suite())
suite.addTest(regression.suite())
suite.addTest(debug.suite())
suite.addTest(utils.suite())
suite.addTest(bytecode_cache.suite())
# doctests will not run on python 3 currently. Too many issues
# with that, do not test that on that platform.
if PY2:
suite.addTest(doctests.suite())
return suite
def main():
"""Runs the testsuite as command line application."""
try:
unittest.main(testLoader=BetterLoader(), defaultTest='suite')
except Exception as e:
print('Error: %s' % e)
| apache-2.0 |
masschallenge/impact-api | web/impact/impact/urls.py | 1 | 5272 | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from django.conf import settings
from django.conf.urls import (
include,
url,
)
from django.conf.urls.static import static
from django.contrib import admin
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import TemplateView
from drf_auto_endpoint.router import router as schema_router
from impact.graphql.utils.custom_error_view import SafeGraphQLView
from rest_framework import routers
from rest_framework_jwt.views import (
obtain_jwt_token,
refresh_jwt_token,
verify_jwt_token,
)
from impact.graphql.middleware import IsAuthenticatedMiddleware
from impact.graphql.schema import (
auth_schema,
schema,
)
from impact.model_utils import model_name_to_snake
from impact.schema import schema_view
from impact.v0.urls import v0_urlpatterns
from impact.v1.urls import v1_urlpatterns
from impact.views import (
CalendarReminderView,
AlgoliaApiKeyView,
GeneralViewSet,
IndexView,
JWTCookieNameView,
)
from .views.general_view_set import MODELS_TO_EXCLUDE_FROM_URL_BINDING
accelerator_router = routers.DefaultRouter()
simpleuser_router = routers.DefaultRouter()
simpleuser_router.register('User', GeneralViewSet, base_name='User')
for model in apps.get_models('accelerator'):
if (model._meta.app_label == 'accelerator' and not
model._meta.auto_created and
model.__name__ not in MODELS_TO_EXCLUDE_FROM_URL_BINDING):
schema_router.register(
model, url=model_name_to_snake(model.__name__))
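# e.g. (illustration) a model class named StartupTeamMember would be
# registered as "startup_team_member" and served under the
# /api/accelerator/ prefix by the schema router included below.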
sso_urlpatterns = [
url(r'^api-token-auth/', obtain_jwt_token),
url(r'^api-token-refresh/', refresh_jwt_token),
url(r'^api-token-verify/', verify_jwt_token),
]
account_urlpatterns = [
url(r'^', include('registration.backends.simple.urls')),
]
urls = [
url(r'^api/sso/token_name/', JWTCookieNameView.as_view(),
name=JWTCookieNameView.view_name),
url(r'^api/algolia/api_key/$', AlgoliaApiKeyView.as_view(),
name=AlgoliaApiKeyView.view_name),
url(r'^api/calendar/reminder/$', CalendarReminderView.as_view(),
name=CalendarReminderView.view_name),
url(r'^api/v0/', include(v0_urlpatterns)),
url(r'^api/v1/', include(v1_urlpatterns)),
url(r'^api/(?P<app>\w+)/(?P<model>[a-z_]+)/'
r'(?P<related_model>[a-z_]+)/$',
GeneralViewSet.as_view({'get': 'list', 'post': 'create'}),
name='related-object-list'),
url(r'^api/(?P<app>\w+)/(?P<model>[a-z_]+)/'
r'(?P<related_model>[a-z_]+)/'
r'(?P<pk>[0-9]+)/$',
GeneralViewSet.as_view({
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
}),
name='related-object-detail'),
url(r'^api/(?P<app>\w+)/(?P<model>[a-z_]+)/$',
GeneralViewSet.as_view({'get': 'list', 'post': 'create'}),
name='object-list'),
url(r'^api/(?P<app>\w+)/(?P<model>[a-z_]+)/(?P<pk>[0-9]+)/$',
GeneralViewSet.as_view({
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
}),
name='object-detail'),
url(r'^api/simpleuser/', include(simpleuser_router.urls)),
url(r'^api/accelerator/', include(schema_router.urls),
name='api-root'),
url(r'^sso/', include(sso_urlpatterns)),
url(r'^admin/', admin.site.urls),
url(r'^accounts/', include(account_urlpatterns)),
url(r'^graphql/$',
csrf_exempt(SafeGraphQLView.as_view(
graphiql=settings.DEBUG,
schema=schema,
middleware=[
IsAuthenticatedMiddleware])),
name="graphql"),
url(r'^graphql/auth/$',
csrf_exempt(SafeGraphQLView.as_view(
graphiql=settings.DEBUG,
schema=auth_schema)),
name="graphql-auth"),
url(r'^oauth/',
include('oauth2_provider.urls', namespace='oauth2_provider')),
url(r'^schema/$', schema_view, name='schema'),
url(r'^directory/(?:.*)$', TemplateView.as_view(
template_name='front-end.html'),
name="directory"),
url(r'^allocator/(?:.*)$', TemplateView.as_view(
template_name='front-end.html'),
name="allocator"),
url(r'^people/$', TemplateView.as_view(
template_name='front-end.html'),
name="entreprenuer_directory"),
url(r'^people/(.*)/$', TemplateView.as_view(
template_name='front-end.html'),
name="entreprenuer_profile"),
url(r'^startups/$', TemplateView.as_view(
template_name='front-end.html'),
name="startup_directory"),
url(r'^openid/', include('oidc_provider.urls', namespace='oidc_provider')),
url(r'^$', IndexView.as_view()),
]
# use staticfiles with waitress (not recommended!)
# TODO: switch to a real static file handler
urls += (
static(settings.STATIC_URL, document_root=settings.STATIC_ROOT))
if settings.DEBUG:
# add debug toolbar
import debug_toolbar # pragma: no cover
urls += [ # pragma: no cover
url(r"^__debug__/", include(debug_toolbar.urls)),
# pragma: no cover
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns = urls
| mit |
Edraak/edx-platform | common/lib/capa/capa/safe_exec/safe_exec.py | 179 | 4876 | """Capa's specialized use of codejail.safe_exec."""
from codejail.safe_exec import safe_exec as codejail_safe_exec
from codejail.safe_exec import not_safe_exec as codejail_not_safe_exec
from codejail.safe_exec import json_safe, SafeExecException
from . import lazymod
from dogapi import dog_stats_api
import hashlib
# Establish the Python environment for Capa.
# Capa assumes float-friendly division always.
# The name "random" is a properly-seeded stand-in for the random module.
CODE_PROLOG = """\
from __future__ import division
import random as random_module
import sys
random = random_module.Random(%r)
random.Random = random_module.Random
sys.modules['random'] = random
"""
ASSUMED_IMPORTS = [
("numpy", "numpy"),
("math", "math"),
("scipy", "scipy"),
("calc", "calc"),
("eia", "eia"),
("chemcalc", "chem.chemcalc"),
("chemtools", "chem.chemtools"),
("miller", "chem.miller"),
("draganddrop", "verifiers.draganddrop"),
]
# We'll need the code from lazymod.py for use in safe_exec, so read it now.
lazymod_py_file = lazymod.__file__
if lazymod_py_file.endswith("c"):
lazymod_py_file = lazymod_py_file[:-1]
lazymod_py = open(lazymod_py_file).read()
LAZY_IMPORTS = [lazymod_py]
for name, modname in ASSUMED_IMPORTS:
LAZY_IMPORTS.append("{} = LazyModule('{}')\n".format(name, modname))
LAZY_IMPORTS = "".join(LAZY_IMPORTS)
def update_hash(hasher, obj):
"""
Update a `hashlib` hasher with a nested object.
To properly cache nested structures, we need to compute a hash from the
entire structure, canonicalizing at every level.
`hasher`'s `.update()` method is called a number of times, touching all of
`obj` in the process. Only primitive JSON-safe types are supported.
"""
hasher.update(str(type(obj)))
if isinstance(obj, (tuple, list)):
for e in obj:
update_hash(hasher, e)
elif isinstance(obj, dict):
for k in sorted(obj):
update_hash(hasher, k)
update_hash(hasher, obj[k])
else:
hasher.update(repr(obj))
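# Minimal sketch: key order must not affect the digest, so both calls below
# yield the same hexdigest (values invented for the demo):
#   h1, h2 = hashlib.md5(), hashlib.md5()
#   update_hash(h1, {'a': 1, 'b': [2, 3]})
#   update_hash(h2, {'b': [2, 3], 'a': 1})
#   assert h1.hexdigest() == h2.hexdigest()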
@dog_stats_api.timed('capa.safe_exec.time')
def safe_exec(
code,
globals_dict,
random_seed=None,
python_path=None,
extra_files=None,
cache=None,
slug=None,
unsafely=False,
):
"""
Execute python code safely.
`code` is the Python code to execute. It has access to the globals in `globals_dict`,
and any changes it makes to those globals are visible in `globals_dict` when this
function returns.
    `random_seed` will be used to seed the `random` module available to the code.
`python_path` is a list of filenames or directories to add to the Python
path before execution. If the name is not in `extra_files`, then it will
also be copied into the sandbox.
`extra_files` is a list of (filename, contents) pairs. These files are
created in the sandbox.
`cache` is an object with .get(key) and .set(key, value) methods. It will be used
to cache the execution, taking into account the code, the values of the globals,
and the random seed.
`slug` is an arbitrary string, a description that's meaningful to the
caller, that will be used in log messages.
If `unsafely` is true, then the code will actually be executed without sandboxing.
"""
# Check the cache for a previous result.
if cache:
safe_globals = json_safe(globals_dict)
md5er = hashlib.md5()
md5er.update(repr(code))
update_hash(md5er, safe_globals)
key = "safe_exec.%r.%s" % (random_seed, md5er.hexdigest())
cached = cache.get(key)
if cached is not None:
# We have a cached result. The result is a pair: the exception
# message, if any, else None; and the resulting globals dictionary.
emsg, cleaned_results = cached
globals_dict.update(cleaned_results)
if emsg:
raise SafeExecException(emsg)
return
# Create the complete code we'll run.
code_prolog = CODE_PROLOG % random_seed
# Decide which code executor to use.
if unsafely:
exec_fn = codejail_not_safe_exec
else:
exec_fn = codejail_safe_exec
# Run the code! Results are side effects in globals_dict.
try:
exec_fn(
code_prolog + LAZY_IMPORTS + code, globals_dict,
python_path=python_path, extra_files=extra_files, slug=slug,
)
except SafeExecException as e:
emsg = e.message
else:
emsg = None
# Put the result back in the cache. This is complicated by the fact that
# the globals dict might not be entirely serializable.
if cache:
cleaned_results = json_safe(globals_dict)
cache.set(key, (emsg, cleaned_results))
# If an exception happened, raise it now.
if emsg:
raise e
| agpl-3.0 |
sean-/ansible | lib/ansible/playbook/attribute.py | 39 | 1130 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class Attribute:
def __init__(self, isa=None, private=False, default=None, required=False, listof=None):
self.isa = isa
self.private = private
self.default = default
self.required = required
self.listof = listof
class FieldAttribute(Attribute):
pass
| gpl-3.0 |
developmentseed/slingshotSMS | pygsm/gsmcodecs/gsm0338.py | 65 | 19653 | # vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
""" Python Character Mapping Codec based on gsm0338 generated from './GSM0338.TXT' with gencodec.py.
With extra sauce to deal with the 'multibyte' extensions!
"""#"
import codecs
import re
### Codec APIs
#
# Shared funcs
#
def _encode(input,errors='strict'):
# split to see if we have any 'extended' characters
runs=unicode_splitter.split(input)
# now iterate through handling any 'multibyte' ourselves
out_str=list()
consumed=0
extended=extended_encode_map.keys()
for run in runs:
if len(run)==1 and run[0] in extended:
out_str.append(extended_indicator+extended_encode_map[run])
consumed+=1
else:
# pass it to the standard encoder
out,cons=codecs.charmap_encode(run,errors,encoding_table)
out_str.append(out)
consumed+=cons
return (''.join(out_str),consumed)
def _decode(input,errors='strict'):
    # opposite of above, look for multibyte 'marker'
# and handle it ourselves, pass the rest to the
# standard decoder
# split to see if we have any 'extended' characters
runs = str_splitter.split(input)
# now iterate through handling any 'multibyte' ourselves
out_uni = []
consumed = 0
for run in runs:
if len(run)==0:
# first char was a marker, but we don't care
# the marker itself will come up in the next run
continue
if len(run)==2 and run[0]==extended_indicator:
try:
out_uni.append(extended_decode_map[run[1]])
consumed += 2
continue
except KeyError:
# second char was not an extended, so
# let this pass through and the marker
# will be interpreted by the table as a NBSP
pass
# pass it to the standard encoder
out,cons=codecs.charmap_decode(run,errors,decoding_table)
out_uni.append(out)
consumed+=cons
return (u''.join(out_uni),consumed)
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return _encode(input,errors)
def decode(self,input,errors='strict'):
# strip any trailing '\x00's as the standard
# says trailing ones are _not_ @'s and
# are in fact blanks
if input[-1]=='\x00':
input=input[:-1]
return _decode(input,errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
# just use the standard encoding as there is no need
# to hold state
return _encode(input,self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
# a little trickier 'cause input _might_ come in
# split right on the extended char marker boundary
def __init__(self,errors='strict'):
codecs.IncrementalDecoder.__init__(self,errors)
self.last_saw_mark=False
def decode(self, input, final=False):
if final:
# check for final '\x00' which should not
# be interpreted as a '@'
if input[-1]=='\x00':
input=input[:-1]
# keep track of how many chars we've added or
# removed to the run to adjust the response from
# _decode
consumed_delta=0
# see if last char was a 2-byte mark
if self.last_saw_mark:
# add it back to the current run
input=extended_indicator+input
consumed_delta-=1 # 'cause we added a char
self.last_saw_mark=False # reset
if input[-1:]==extended_indicator and not final:
# chop it off
input=input[:-1]
consumed_delta+=1 # because we just consumed one char
self.last_saw_mark=True
# NOTE: if we are final and last mark is
# and extended indicator, it will be interpreted
# as NBSP
return _decode(input,self.errors)[0]
def reset(self):
self.last_saw_mark=False
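# Illustrative sketch of the split-marker case handled above:
#   dec = IncrementalDecoder()
#   dec.decode('a\x1b')        # -> u'a'      (trailing mark buffered)
#   dec.decode('\x65', True)   # -> u'\u20ac' (mark + 0x65 is the euro escape)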
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='gsm0338',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Tables
# gsm 'extended' character.
# gsm, annoyingly, is MOSTLY 7-bit chars
#
# BUT has 10 'extended' chars represented
# by 2-chars, an idicator, and then one of
# the 10
# first of the 2-chars is indicator
extended_indicator='\x1b'
# second char is the 'extended' character
extended_encode_map = { # Unicode->GSM string
u'\x0c':'\x0a', # FORM FEED
u'^':'\x14', # CIRCUMFLEX ACCENT
u'{':'\x28', # LEFT CURLY BRACKET
u'}':'\x29', # RIGHT CURLY BRACKET
u'\\':'\x2f', # REVERSE SOLIDUS
u'[':'\x3c', # LEFT SQUARE BRACKET
u'~':'\x3d', # TILDE
u']':'\x3e', # RIGHT SQUARE BRACKET
u'|':'\x40', # VERTICAL LINE
u'\u20ac':'\x65' # EURO SIGN
}
# reverse the map above for decoding
# GSM String->Unicode
uni,gsm=zip(*extended_encode_map.items())
extended_decode_map=dict(zip(gsm,uni))
# splitter
str_splitter=re.compile('(%(ind)s[^%(ind)s])' % { 'ind':extended_indicator })
unicode_splitter=re.compile(u'([%s])' % re.escape(''.join(extended_encode_map.keys())), re.UNICODE)
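# Illustrative round-trip for the escape mechanism (EURO SIGN has no 7-bit
# slot of its own):
#   _encode(u'a\u20acb')       # -> ('a\x1b\x65b', 3)
#   _decode('a\x1b\x65b')[0]   # -> u'a\u20acb'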
# the normal 1-char table
decoding_table = (
u'@' # 0x00 -> COMMERCIAL AT
u'\xa3' # 0x01 -> POUND SIGN
u'$' # 0x02 -> DOLLAR SIGN
u'\xa5' # 0x03 -> YEN SIGN
u'\xe8' # 0x04 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0x05 -> LATIN SMALL LETTER E WITH ACUTE
u'\xf9' # 0x06 -> LATIN SMALL LETTER U WITH GRAVE
u'\xec' # 0x07 -> LATIN SMALL LETTER I WITH GRAVE
u'\xf2' # 0x08 -> LATIN SMALL LETTER O WITH GRAVE
u'\xe7' # 0x09 -> LATIN SMALL LETTER C WITH CEDILLA
u'\n' # 0x0A -> LINE FEED
u'\xd8' # 0x0B -> LATIN CAPITAL LETTER O WITH STROKE
u'\xf8' # 0x0C -> LATIN SMALL LETTER O WITH STROKE
u'\r' # 0x0D -> CARRIAGE RETURN
u'\xc5' # 0x0E -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xe5' # 0x0F -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0394' # 0x10 -> GREEK CAPITAL LETTER DELTA
u'_' # 0x11 -> LOW LINE
u'\u03a6' # 0x12 -> GREEK CAPITAL LETTER PHI
u'\u0393' # 0x13 -> GREEK CAPITAL LETTER GAMMA
u'\u039b' # 0x14 -> GREEK CAPITAL LETTER LAMDA
u'\u03a9' # 0x15 -> GREEK CAPITAL LETTER OMEGA
u'\u03a0' # 0x16 -> GREEK CAPITAL LETTER PI
u'\u03a8' # 0x17 -> GREEK CAPITAL LETTER PSI
u'\u03a3' # 0x18 -> GREEK CAPITAL LETTER SIGMA
u'\u0398' # 0x19 -> GREEK CAPITAL LETTER THETA
u'\u039e' # 0x1A -> GREEK CAPITAL LETTER XI
u'\xa0' # 0x1B -> ESCAPE TO EXTENSION TABLE (or displayed as NBSP, see note above)
u'\xc6' # 0x1C -> LATIN CAPITAL LETTER AE
u'\xe6' # 0x1D -> LATIN SMALL LETTER AE
u'\xdf' # 0x1E -> LATIN SMALL LETTER SHARP S (German)
u'\xc9' # 0x1F -> LATIN CAPITAL LETTER E WITH ACUTE
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'\xa4' # 0x24 -> CURRENCY SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'\xa1' # 0x40 -> INVERTED EXCLAMATION MARK
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'\xc4' # 0x5B -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xd6' # 0x5C -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd1' # 0x5D -> LATIN CAPITAL LETTER N WITH TILDE
u'\xdc' # 0x5E -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xa7' # 0x5F -> SECTION SIGN
u'\xbf' # 0x60 -> INVERTED QUESTION MARK
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'\xe4' # 0x7B -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xf6' # 0x7C -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf1' # 0x7D -> LATIN SMALL LETTER N WITH TILDE
u'\xfc' # 0x7E -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe0' # 0x7F -> LATIN SMALL LETTER A WITH GRAVE
u'\ufffe' # 0x80 -> UNDEFINED
u'\ufffe' # 0x81 -> UNDEFINED
u'\ufffe' # 0x82 -> UNDEFINED
u'\ufffe' # 0x83 -> UNDEFINED
u'\ufffe' # 0x84 -> UNDEFINED
u'\ufffe' # 0x85 -> UNDEFINED
u'\ufffe' # 0x86 -> UNDEFINED
u'\ufffe' # 0x87 -> UNDEFINED
u'\ufffe' # 0x88 -> UNDEFINED
u'\ufffe' # 0x89 -> UNDEFINED
u'\ufffe' # 0x8A -> UNDEFINED
u'\ufffe' # 0x8B -> UNDEFINED
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\ufffe' # 0x91 -> UNDEFINED
u'\ufffe' # 0x92 -> UNDEFINED
u'\ufffe' # 0x93 -> UNDEFINED
u'\ufffe' # 0x94 -> UNDEFINED
u'\ufffe' # 0x95 -> UNDEFINED
u'\ufffe' # 0x96 -> UNDEFINED
u'\ufffe' # 0x97 -> UNDEFINED
u'\ufffe' # 0x98 -> UNDEFINED
u'\ufffe' # 0x99 -> UNDEFINED
u'\ufffe' # 0x9A -> UNDEFINED
u'\ufffe' # 0x9B -> UNDEFINED
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\ufffe' # 0xA0 -> UNDEFINED
u'\ufffe' # 0xA1 -> UNDEFINED
u'\ufffe' # 0xA2 -> UNDEFINED
u'\ufffe' # 0xA3 -> UNDEFINED
u'\ufffe' # 0xA4 -> UNDEFINED
u'\ufffe' # 0xA5 -> UNDEFINED
u'\ufffe' # 0xA6 -> UNDEFINED
u'\ufffe' # 0xA7 -> UNDEFINED
u'\ufffe' # 0xA8 -> UNDEFINED
u'\ufffe' # 0xA9 -> UNDEFINED
u'\ufffe' # 0xAA -> UNDEFINED
u'\ufffe' # 0xAB -> UNDEFINED
u'\ufffe' # 0xAC -> UNDEFINED
u'\ufffe' # 0xAD -> UNDEFINED
u'\ufffe' # 0xAE -> UNDEFINED
u'\ufffe' # 0xAF -> UNDEFINED
u'\ufffe' # 0xB0 -> UNDEFINED
u'\ufffe' # 0xB1 -> UNDEFINED
u'\ufffe' # 0xB2 -> UNDEFINED
u'\ufffe' # 0xB3 -> UNDEFINED
u'\ufffe' # 0xB4 -> UNDEFINED
u'\ufffe' # 0xB5 -> UNDEFINED
u'\ufffe' # 0xB6 -> UNDEFINED
u'\ufffe' # 0xB7 -> UNDEFINED
u'\ufffe' # 0xB8 -> UNDEFINED
u'\ufffe' # 0xB9 -> UNDEFINED
u'\ufffe' # 0xBA -> UNDEFINED
u'\ufffe' # 0xBB -> UNDEFINED
u'\ufffe' # 0xBC -> UNDEFINED
u'\ufffe' # 0xBD -> UNDEFINED
u'\ufffe' # 0xBE -> UNDEFINED
u'\ufffe' # 0xBF -> UNDEFINED
u'\ufffe' # 0xC0 -> UNDEFINED
u'\ufffe' # 0xC1 -> UNDEFINED
u'\ufffe' # 0xC2 -> UNDEFINED
u'\ufffe' # 0xC3 -> UNDEFINED
u'\ufffe' # 0xC4 -> UNDEFINED
u'\ufffe' # 0xC5 -> UNDEFINED
u'\ufffe' # 0xC6 -> UNDEFINED
u'\ufffe' # 0xC7 -> UNDEFINED
u'\ufffe' # 0xC8 -> UNDEFINED
u'\ufffe' # 0xC9 -> UNDEFINED
u'\ufffe' # 0xCA -> UNDEFINED
u'\ufffe' # 0xCB -> UNDEFINED
u'\ufffe' # 0xCC -> UNDEFINED
u'\ufffe' # 0xCD -> UNDEFINED
u'\ufffe' # 0xCE -> UNDEFINED
u'\ufffe' # 0xCF -> UNDEFINED
u'\ufffe' # 0xD0 -> UNDEFINED
u'\ufffe' # 0xD1 -> UNDEFINED
u'\ufffe' # 0xD2 -> UNDEFINED
u'\ufffe' # 0xD3 -> UNDEFINED
u'\ufffe' # 0xD4 -> UNDEFINED
u'\ufffe' # 0xD5 -> UNDEFINED
u'\ufffe' # 0xD6 -> UNDEFINED
u'\ufffe' # 0xD7 -> UNDEFINED
u'\ufffe' # 0xD8 -> UNDEFINED
u'\ufffe' # 0xD9 -> UNDEFINED
u'\ufffe' # 0xDA -> UNDEFINED
u'\ufffe' # 0xDB -> UNDEFINED
u'\ufffe' # 0xDC -> UNDEFINED
u'\ufffe' # 0xDD -> UNDEFINED
u'\ufffe' # 0xDE -> UNDEFINED
u'\ufffe' # 0xDF -> UNDEFINED
u'\ufffe' # 0xE0 -> UNDEFINED
u'\ufffe' # 0xE1 -> UNDEFINED
u'\ufffe' # 0xE2 -> UNDEFINED
u'\ufffe' # 0xE3 -> UNDEFINED
u'\ufffe' # 0xE4 -> UNDEFINED
u'\ufffe' # 0xE5 -> UNDEFINED
u'\ufffe' # 0xE6 -> UNDEFINED
u'\ufffe' # 0xE7 -> UNDEFINED
u'\ufffe' # 0xE8 -> UNDEFINED
u'\ufffe' # 0xE9 -> UNDEFINED
u'\ufffe' # 0xEA -> UNDEFINED
u'\ufffe' # 0xEB -> UNDEFINED
u'\ufffe' # 0xEC -> UNDEFINED
u'\ufffe' # 0xED -> UNDEFINED
u'\ufffe' # 0xEE -> UNDEFINED
u'\ufffe' # 0xEF -> UNDEFINED
u'\ufffe' # 0xF0 -> UNDEFINED
u'\ufffe' # 0xF1 -> UNDEFINED
u'\ufffe' # 0xF2 -> UNDEFINED
u'\ufffe' # 0xF3 -> UNDEFINED
u'\ufffe' # 0xF4 -> UNDEFINED
u'\ufffe' # 0xF5 -> UNDEFINED
u'\ufffe' # 0xF6 -> UNDEFINED
u'\ufffe' # 0xF7 -> UNDEFINED
u'\ufffe' # 0xF8 -> UNDEFINED
u'\ufffe' # 0xF9 -> UNDEFINED
u'\ufffe' # 0xFA -> UNDEFINED
u'\ufffe' # 0xFB -> UNDEFINED
u'\ufffe' # 0xFC -> UNDEFINED
u'\ufffe' # 0xFD -> UNDEFINED
u'\ufffe' # 0xFE -> UNDEFINED
u'\ufffe' # 0xFF -> UNDEFINED
)
encoding_table=codecs.charmap_build(decoding_table)
if __name__ == "__main__":
"""
Run this as a script for poor-man's unit tests
"""
isoLatin15_alpha=u" !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJLKMNOPQRSTUVWXYZ[\\]^-`abcdefghijklmnopqrstuvwxyz{|}~¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
gsm_alpha=u"\u00A0@£$¥èéùìòçØøÅåΔ_ΦΓΛΩΠΨΣΘΞ^{}\\[~]|\u00A0\u00A0€ÆæßÉ !\"#¤%&'()*+,-./0123456789:;<=>?¡ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÑܧ¿abcdefghijklmnopqrstuvwxyzäöñüà\u00A0"
gsm_alpha_encoded='1b000102030405060708090b0c0e0f101112131415161718191a1b141b281b291b2f1b3c1b3d1b3e1b401b1b1b651c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f1b'
gsm_alpha_gsm=gsm_alpha_encoded.decode('hex')
# some simple tests
print "Assert GSM alphabet, encoded in GSM is correct (unicode->gsm_str)..."
encoded=_encode(gsm_alpha)[0].encode('hex')
print encoded
assert(encoded==gsm_alpha_encoded)
print "Good"
print
print "Assert GSM encoded string converts to correct Unicode (gsm_str->unicode)..."
assert(_decode(gsm_alpha_gsm)[0]==gsm_alpha)
print "Good"
print
# test Codec objects
print "Try the codec objects unicode_test_str->encode->decode==unicode_test_str..."
c=Codec()
gsm_str,out=c.encode(gsm_alpha)
assert(c.decode(gsm_str)[0]==gsm_alpha)
print "Good"
print
print "Try the incremental codecs, same test, but loop it..."
def _inc_encode(ie):
encoded=list()
hop=17 # make it something odd
final=False
for i in range(0,len(gsm_alpha),hop):
end=i+hop
if end>=len(gsm_alpha): final=True
encoded.append(ie.encode(gsm_alpha[i:end],final))
return ''.join(encoded)
enc=IncrementalEncoder()
assert(_inc_encode(enc)==gsm_alpha_gsm)
print "Good"
print
print "Now do that again with the same encoder to make sure state is reset..."
enc.reset()
assert(_inc_encode(enc)==gsm_alpha_gsm)
print "Good"
print
print "Now decode the encoded string back to unicode..."
def _inc_decode(idec):
decoded=list()
# define so we KNOW we hit a mark as last char
hop=gsm_alpha_gsm.index('\x1b')+1
final=False
for i in range(0,len(gsm_alpha_gsm),hop):
end=i+hop
if end>=len(gsm_alpha_gsm): final=True
decoded.append(idec.decode(gsm_alpha_gsm[i:end],final))
return ''.join(decoded)
dec=IncrementalDecoder()
assert(_inc_decode(dec)==gsm_alpha)
print "Good"
print
print "Do it again with some decoder to make sure state is cleared..."
dec.reset()
assert(_inc_decode(dec)==gsm_alpha)
print "Good"
print
| bsd-3-clause |
StefanRijnhart/server-tools | module_parent_dependencies/__init__.py | 3 | 1061 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Module - Parent Dependencies module for Odoo
# Copyright (C) 2014 GRAP (http://www.grap.coop)
# @author Sylvain LE GAL (https://twitter.com/legalsylvain)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
| agpl-3.0 |
2013Commons/hue | desktop/core/ext-py/Django-1.4.5/django/contrib/databrowse/datastructures.py | 86 | 9237 | """
These classes are light wrappers around Django's database API that provide
convenience functionality and permalink functions for the databrowse app.
"""
from django.db import models
from django.utils import formats
from django.utils.text import capfirst
from django.utils.encoding import smart_unicode, smart_str, iri_to_uri
from django.utils.safestring import mark_safe
from django.db.models.query import QuerySet
EMPTY_VALUE = '(None)'
DISPLAY_SIZE = 100
class EasyModel(object):
def __init__(self, site, model):
self.site = site
self.model = model
self.model_list = site.registry.keys()
self.verbose_name = model._meta.verbose_name
self.verbose_name_plural = model._meta.verbose_name_plural
def __repr__(self):
return '<EasyModel for %s>' % smart_str(self.model._meta.object_name)
def model_databrowse(self):
"Returns the ModelDatabrowse class for this model."
return self.site.registry[self.model]
def url(self):
return mark_safe('%s%s/%s/' % (self.site.root_url, self.model._meta.app_label, self.model._meta.module_name))
def objects(self, **kwargs):
return self.get_query_set().filter(**kwargs)
def get_query_set(self):
easy_qs = self.model._default_manager.get_query_set()._clone(klass=EasyQuerySet)
easy_qs._easymodel = self
return easy_qs
def object_by_pk(self, pk):
return EasyInstance(self, self.model._default_manager.get(pk=pk))
def sample_objects(self):
for obj in self.model._default_manager.all()[:3]:
yield EasyInstance(self, obj)
def field(self, name):
try:
f = self.model._meta.get_field(name)
except models.FieldDoesNotExist:
return None
return EasyField(self, f)
def fields(self):
return [EasyField(self, f) for f in (self.model._meta.fields + self.model._meta.many_to_many)]
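# Illustrative usage (model name assumed): EasyModel adds URL helpers and
# easy iteration around a registered model, e.g.
#   em = EasyModel(site, Book)
#   em.url()           # -> u'<root_url>app_label/book/'
#   em.field('title')  # -> EasyField, or None if no such field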
class EasyField(object):
def __init__(self, easy_model, field):
self.model, self.field = easy_model, field
def __repr__(self):
return smart_str(u'<EasyField for %s.%s>' % (self.model.model._meta.object_name, self.field.name))
def choices(self):
for value, label in self.field.choices:
yield EasyChoice(self.model, self, value, label)
def url(self):
if self.field.choices:
return mark_safe('%s%s/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.name))
elif self.field.rel:
return mark_safe('%s%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name))
class EasyChoice(object):
def __init__(self, easy_model, field, value, label):
self.model, self.field = easy_model, field
self.value, self.label = value, label
def __repr__(self):
return smart_str(u'<EasyChoice for %s.%s>' % (self.model.model._meta.object_name, self.field.name))
def url(self):
return mark_safe('%s%s/%s/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.field.name, iri_to_uri(self.value)))
class EasyInstance(object):
def __init__(self, easy_model, instance):
self.model, self.instance = easy_model, instance
def __repr__(self):
return smart_str(u'<EasyInstance for %s (%s)>' % (self.model.model._meta.object_name, self.instance._get_pk_val()))
def __unicode__(self):
val = smart_unicode(self.instance)
if len(val) > DISPLAY_SIZE:
return val[:DISPLAY_SIZE] + u'...'
return val
def __str__(self):
return self.__unicode__().encode('utf-8')
def pk(self):
return self.instance._get_pk_val()
def url(self):
return mark_safe('%s%s/%s/objects/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, iri_to_uri(self.pk())))
def fields(self):
"""
Generator that yields EasyInstanceFields for each field in this
EasyInstance's model.
"""
for f in self.model.model._meta.fields + self.model.model._meta.many_to_many:
yield EasyInstanceField(self.model, self, f)
def related_objects(self):
"""
Generator that yields dictionaries of all models that have this
EasyInstance's model as a ForeignKey or ManyToManyField, along with
lists of related objects.
"""
for rel_object in self.model.model._meta.get_all_related_objects() + self.model.model._meta.get_all_related_many_to_many_objects():
if rel_object.model not in self.model.model_list:
continue # Skip models that aren't in the model_list
em = EasyModel(self.model.site, rel_object.model)
yield {
'model': em,
'related_field': rel_object.field.verbose_name,
'object_list': [EasyInstance(em, i) for i in getattr(self.instance, rel_object.get_accessor_name()).all()],
}
class EasyInstanceField(object):
def __init__(self, easy_model, instance, field):
self.model, self.field, self.instance = easy_model, field, instance
self.raw_value = getattr(instance.instance, field.name)
def __repr__(self):
return smart_str(u'<EasyInstanceField for %s.%s>' % (self.model.model._meta.object_name, self.field.name))
def values(self):
"""
Returns a list of values for this field for this instance. It's a list
        so we can accommodate many-to-many fields.
"""
# This import is deliberately inside the function because it causes
# some settings to be imported, and we don't want to do that at the
# module level.
if self.field.rel:
if isinstance(self.field.rel, models.ManyToOneRel):
objs = getattr(self.instance.instance, self.field.name)
elif isinstance(self.field.rel, models.ManyToManyRel): # ManyToManyRel
return list(getattr(self.instance.instance, self.field.name).all())
elif self.field.choices:
objs = dict(self.field.choices).get(self.raw_value, EMPTY_VALUE)
elif isinstance(self.field, models.DateField) or isinstance(self.field, models.TimeField):
if self.raw_value:
if isinstance(self.field, models.DateTimeField):
objs = capfirst(formats.date_format(self.raw_value, 'DATETIME_FORMAT'))
elif isinstance(self.field, models.TimeField):
objs = capfirst(formats.time_format(self.raw_value, 'TIME_FORMAT'))
else:
objs = capfirst(formats.date_format(self.raw_value, 'DATE_FORMAT'))
else:
objs = EMPTY_VALUE
elif isinstance(self.field, models.BooleanField) or isinstance(self.field, models.NullBooleanField):
objs = {True: 'Yes', False: 'No', None: 'Unknown'}[self.raw_value]
else:
objs = self.raw_value
return [objs]
def urls(self):
"Returns a list of (value, URL) tuples."
# First, check the urls() method for each plugin.
plugin_urls = []
for plugin_name, plugin in self.model.model_databrowse().plugins.items():
urls = plugin.urls(plugin_name, self)
if urls is not None:
#plugin_urls.append(urls)
values = self.values()
return zip(self.values(), urls)
if self.field.rel:
m = EasyModel(self.model.site, self.field.rel.to)
if self.field.rel.to in self.model.model_list:
lst = []
for value in self.values():
if value is None:
continue
url = mark_safe('%s%s/%s/objects/%s/' % (self.model.site.root_url, m.model._meta.app_label, m.model._meta.module_name, iri_to_uri(value._get_pk_val())))
lst.append((smart_unicode(value), url))
else:
lst = [(value, None) for value in self.values()]
elif self.field.choices:
lst = []
for value in self.values():
url = mark_safe('%s%s/%s/fields/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.name, iri_to_uri(self.raw_value)))
lst.append((value, url))
elif isinstance(self.field, models.URLField):
val = self.values()[0]
lst = [(val, iri_to_uri(val))]
else:
lst = [(self.values()[0], None)]
return lst
class EasyQuerySet(QuerySet):
"""
When creating (or cloning to) an `EasyQuerySet`, make sure to set the
`_easymodel` variable to the related `EasyModel`.
"""
def iterator(self, *args, **kwargs):
for obj in super(EasyQuerySet, self).iterator(*args, **kwargs):
yield EasyInstance(self._easymodel, obj)
def _clone(self, *args, **kwargs):
c = super(EasyQuerySet, self)._clone(*args, **kwargs)
c._easymodel = self._easymodel
return c
| apache-2.0 |
hryang/flatbuffers | python/flatbuffers/table.py | 65 | 4153 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import encode
from . import number_types as N
class Table(object):
"""Table wraps a byte slice and provides read access to its data.
The variable `Pos` indicates the root of the FlatBuffers object therein."""
__slots__ = ("Bytes", "Pos")
def __init__(self, buf, pos):
N.enforce_number(pos, N.UOffsetTFlags)
self.Bytes = buf
self.Pos = pos
def Offset(self, vtableOffset):
"""Offset provides access into the Table's vtable.
Deprecated fields are ignored by checking the vtable's length."""
vtable = self.Pos - self.Get(N.SOffsetTFlags, self.Pos)
vtableEnd = self.Get(N.VOffsetTFlags, vtable)
if vtableOffset < vtableEnd:
return self.Get(N.VOffsetTFlags, vtable + vtableOffset)
return 0
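    # Illustrative note: a vtable is laid out as [vtable length, object
    # length, per-field offsets...]; requesting a slot at or beyond the
    # stored vtable length returns 0, which callers read as "field absent,
    # use the default".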
def Indirect(self, off):
"""Indirect retrieves the relative offset stored at `offset`."""
N.enforce_number(off, N.UOffsetTFlags)
return off + encode.Get(N.UOffsetTFlags.packer_type, self.Bytes, off)
def String(self, off):
"""String gets a string from data stored inside the flatbuffer."""
N.enforce_number(off, N.UOffsetTFlags)
off += encode.Get(N.UOffsetTFlags.packer_type, self.Bytes, off)
start = off + N.UOffsetTFlags.bytewidth
length = encode.Get(N.UOffsetTFlags.packer_type, self.Bytes, off)
return bytes(self.Bytes[start:start+length])
def VectorLen(self, off):
"""VectorLen retrieves the length of the vector whose offset is stored
at "off" in this object."""
N.enforce_number(off, N.UOffsetTFlags)
off += self.Pos
off += encode.Get(N.UOffsetTFlags.packer_type, self.Bytes, off)
ret = encode.Get(N.UOffsetTFlags.packer_type, self.Bytes, off)
return ret
def Vector(self, off):
"""Vector retrieves the start of data of the vector whose offset is
stored at "off" in this object."""
N.enforce_number(off, N.UOffsetTFlags)
off += self.Pos
x = off + self.Get(N.UOffsetTFlags, off)
# data starts after metadata containing the vector length
x += N.UOffsetTFlags.bytewidth
return x
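    # Editorial usage sketch: iterating a vector of uint8 elements. Both
    # VectorLen() and Vector() take the vtable-relative offset returned by
    # Offset(); slot 8 is an illustrative assumption.
    #
    #     o = t.Offset(8)
    #     if o != 0:
    #         start = t.Vector(o)
    #         data = [t.Get(N.Uint8Flags, start + i) for i in range(t.VectorLen(o))]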
def Union(self, t2, off):
"""Union initializes any Table-derived type to point to the union at
the given offset."""
assert type(t2) is Table
N.enforce_number(off, N.UOffsetTFlags)
off += self.Pos
t2.Pos = off + self.Get(N.UOffsetTFlags, off)
t2.Bytes = self.Bytes
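    # Editorial usage sketch: resolving a union field into a fresh Table that
    # the caller then re-wraps in the concrete generated type; slot 10 is an
    # illustrative assumption.
    #
    #     o = t.Offset(10)
    #     if o != 0:
    #         obj = Table(bytearray(), 0)
    #         t.Union(obj, o)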
def Get(self, flags, off):
"""
Get retrieves a value of the type specified by `flags` at the
given offset.
"""
N.enforce_number(off, N.UOffsetTFlags)
return flags.py_type(encode.Get(flags.packer_type, self.Bytes, off))
    def GetSlot(self, slot, d, validator_flags):
        """
        GetSlot retrieves the value of the type described by
        `validator_flags` from the given vtable slot, or the default `d`
        if the slot is empty.
        """
N.enforce_number(slot, N.VOffsetTFlags)
if validator_flags is not None:
N.enforce_number(d, validator_flags)
off = self.Offset(slot)
if off == 0:
return d
return self.Get(validator_flags, self.Pos + off)
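    # Editorial usage sketch: GetSlot is the one-call shorthand for the
    # Offset()/Get() sequence above, falling back to the default when the
    # field is absent from the vtable.
    #
    #     value = t.GetSlot(4, 0, N.Int32Flags)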
def GetVOffsetTSlot(self, slot, d):
"""
GetVOffsetTSlot retrieves the VOffsetT that the given vtable location
points to. If the vtable value is zero, the default value `d`
will be returned.
"""
N.enforce_number(slot, N.VOffsetTFlags)
N.enforce_number(d, N.VOffsetTFlags)
off = self.Offset(slot)
if off == 0:
return d
return off
| apache-2.0 |
dgarros/ansible | test/units/modules/network/eos/test_eos_config.py | 19 | 4969 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.eos import eos_config
from .eos_module import TestEosModule, load_fixture, set_module_args
class TestEosConfigModule(TestEosModule):
module = eos_config
def setUp(self):
self.mock_get_config = patch('ansible.modules.network.eos.eos_config.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.eos.eos_config.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
self.get_config.return_value = load_fixture('eos_config_config.cfg')
self.load_config.return_value = dict(diff=None, session='session')
def test_eos_config_no_change(self):
args = dict(lines=['hostname localhost'])
set_module_args(args)
result = self.execute_module()
def test_eos_config_src(self):
args = dict(src=load_fixture('eos_config_candidate.cfg'))
set_module_args(args)
result = self.execute_module(changed=True)
config = ['hostname switch01', 'interface Ethernet1',
'description test interface', 'no shutdown', 'ip routing']
self.assertEqual(sorted(config), sorted(result['commands']), result['commands'])
def test_eos_config_lines(self):
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'])
set_module_args(args)
result = self.execute_module(changed=True)
config = ['hostname switch01']
self.assertEqual(sorted(config), sorted(result['commands']), result['commands'])
def test_eos_config_before(self):
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
before=['before command'])
set_module_args(args)
result = self.execute_module(changed=True)
config = ['before command', 'hostname switch01']
self.assertEqual(sorted(config), sorted(result['commands']), result['commands'])
self.assertEqual('before command', result['commands'][0])
def test_eos_config_after(self):
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
after=['after command'])
set_module_args(args)
result = self.execute_module(changed=True)
config = ['after command', 'hostname switch01']
self.assertEqual(sorted(config), sorted(result['commands']), result['commands'])
self.assertEqual('after command', result['commands'][-1])
def test_eos_config_parents(self):
args = dict(lines=['ip address 1.2.3.4/5', 'no shutdown'], parents=['interface Ethernet10'])
set_module_args(args)
result = self.execute_module(changed=True)
config = ['interface Ethernet10', 'ip address 1.2.3.4/5', 'no shutdown']
self.assertEqual(config, result['commands'], result['commands'])
def test_eos_config_src_and_lines_fails(self):
args = dict(src='foo', lines='foo')
set_module_args(args)
result = self.execute_module(failed=True)
def test_eos_config_match_exact_requires_lines(self):
args = dict(match='exact')
set_module_args(args)
result = self.execute_module(failed=True)
def test_eos_config_match_strict_requires_lines(self):
args = dict(match='strict')
set_module_args(args)
result = self.execute_module(failed=True)
def test_eos_config_replace_block_requires_lines(self):
args = dict(replace='block')
set_module_args(args)
result = self.execute_module(failed=True)
def test_eos_config_replace_config_requires_src(self):
args = dict(replace='config')
set_module_args(args)
result = self.execute_module(failed=True)
def test_eos_config_backup_returns__backup__(self):
args = dict(backup=True)
set_module_args(args)
result = self.execute_module()
self.assertIn('__backup__', result)
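    # Editorial sketch of the pattern the tests above share, for anyone
    # adding a new case (the ntp line is a made-up example, not a value from
    # the fixture config):
    #
    #     def test_eos_config_new_case(self):
    #         set_module_args(dict(lines=['ntp server 10.0.0.1']))
    #         result = self.execute_module(changed=True)
    #         self.assertEqual(['ntp server 10.0.0.1'], result['commands'])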
| gpl-3.0 |
nathanial/lettuce | tests/integration/lib/Django-1.3/django/core/management/commands/startproject.py | 322 | 1680 | from django.core.management.base import copy_helper, CommandError, LabelCommand
from django.utils.importlib import import_module
import os
import re
from random import choice
class Command(LabelCommand):
help = "Creates a Django project directory structure for the given project name in the current directory."
args = "[projectname]"
label = 'project name'
requires_model_validation = False
# Can't import settings during this command, because they haven't
# necessarily been created.
can_import_settings = False
def handle_label(self, project_name, **options):
        # Create the project directory structure inside the current
        # working directory.
directory = os.getcwd()
# Check that the project_name cannot be imported.
try:
import_module(project_name)
except ImportError:
pass
else:
raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name." % project_name)
copy_helper(self.style, 'project', project_name, directory)
        # Create a random SECRET_KEY string, and put it in the main settings.
main_settings_file = os.path.join(directory, project_name, 'settings.py')
settings_contents = open(main_settings_file, 'r').read()
fp = open(main_settings_file, 'w')
secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
fp.write(settings_contents)
fp.close()
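        # Editorial illustration (not part of the command) of the SECRET_KEY
        # substitution above: the lookbehind matches only the closing quote
        # of the empty SECRET_KEY assignment and splices in the generated
        # key before a new closing quote.
        #
        #     >>> re.sub(r"(?<=SECRET_KEY = ')'", 'abc123' + "'", "SECRET_KEY = ''")
        #     "SECRET_KEY = 'abc123'"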
| gpl-3.0 |