Dataset schema (column, dtype, value range / class count):

  repo_name      string   lengths 5 to 100
  path           string   lengths 4 to 299
  copies         string   990 distinct values
  size           string   lengths 4 to 7
  content        string   lengths 666 to 1.03M
  license        string   15 distinct values
  hash           int64    -9,223,351,895,964,839,000 to 9,223,297,778B
  line_mean      float64  3.17 to 100
  line_max       int64    7 to 1k
  alpha_frac     float64  0.25 to 0.98
  autogenerated  bool     1 class
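Each record below is one source file described by this schema. As a quick orientation, here is a minimal sketch of how rows with this layout could be loaded and filtered with the Hugging Face datasets library; the dataset path is a placeholder (not this dump's real identifier), and only column names from the schema above are assumed.

```python
# Minimal sketch, assuming the rows below come from a Hugging Face dataset
# with the schema listed above. The dataset path is hypothetical.
from datasets import load_dataset

ds = load_dataset("user/python-source-files", split="train")  # placeholder path

# Keep only files under two of the licenses that appear in the rows below.
subset = ds.filter(lambda row: row["license"] in {"bsd-3-clause", "mit"})

for row in subset.select(range(3)):
    print(row["repo_name"], row["path"], row["size"], row["alpha_frac"])
```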
repo_name: shinate/phantomjs
path: src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/outputcapture.py
copies: 124
size: 5478
content:
# Copyright (c) 2009, Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Class for unittest support. Used for capturing stderr/stdout. import logging import unittest # Don't use unittest2 here as the autoinstaller may not have it yet. import sys from StringIO import StringIO class OutputCapture(object): # By default we capture the output to a stream. Other modules may override # this function in order to do things like pass through the output. See # webkitpy.test.main for an example. 
@staticmethod def stream_wrapper(stream): return StringIO() def __init__(self): self.saved_outputs = dict() self._log_level = logging.INFO def set_log_level(self, log_level): self._log_level = log_level if hasattr(self, '_logs_handler'): self._logs_handler.setLevel(self._log_level) def _capture_output_with_name(self, output_name): stream = getattr(sys, output_name) captured_output = self.stream_wrapper(stream) self.saved_outputs[output_name] = stream setattr(sys, output_name, captured_output) return captured_output def _restore_output_with_name(self, output_name): captured_output = getattr(sys, output_name).getvalue() setattr(sys, output_name, self.saved_outputs[output_name]) del self.saved_outputs[output_name] return captured_output def capture_output(self): self._logs = StringIO() self._logs_handler = logging.StreamHandler(self._logs) self._logs_handler.setLevel(self._log_level) self._logger = logging.getLogger() self._orig_log_level = self._logger.level self._logger.addHandler(self._logs_handler) self._logger.setLevel(min(self._log_level, self._orig_log_level)) return (self._capture_output_with_name("stdout"), self._capture_output_with_name("stderr")) def restore_output(self): self._logger.removeHandler(self._logs_handler) self._logger.setLevel(self._orig_log_level) self._logs_handler.flush() self._logs.flush() logs_string = self._logs.getvalue() delattr(self, '_logs_handler') delattr(self, '_logs') return (self._restore_output_with_name("stdout"), self._restore_output_with_name("stderr"), logs_string) def assert_outputs(self, testcase, function, args=[], kwargs={}, expected_stdout="", expected_stderr="", expected_exception=None, expected_logs=None): self.capture_output() try: if expected_exception: return_value = testcase.assertRaises(expected_exception, function, *args, **kwargs) else: return_value = function(*args, **kwargs) finally: (stdout_string, stderr_string, logs_string) = self.restore_output() if hasattr(testcase, 'assertMultiLineEqual'): testassert = testcase.assertMultiLineEqual else: testassert = testcase.assertEqual testassert(stdout_string, expected_stdout) testassert(stderr_string, expected_stderr) if expected_logs is not None: testassert(logs_string, expected_logs) # This is a little strange, but I don't know where else to return this information. return return_value class OutputCaptureTestCaseBase(unittest.TestCase): maxDiff = None def setUp(self): unittest.TestCase.setUp(self) self.output_capture = OutputCapture() (self.__captured_stdout, self.__captured_stderr) = self.output_capture.capture_output() def tearDown(self): del self.__captured_stdout del self.__captured_stderr self.output_capture.restore_output() unittest.TestCase.tearDown(self) def assertStdout(self, expected_stdout): self.assertEqual(expected_stdout, self.__captured_stdout.getvalue()) def assertStderr(self, expected_stderr): self.assertEqual(expected_stderr, self.__captured_stderr.getvalue())
license: bsd-3-clause
hash: -268,936,401,586,776,220
line_mean: 41.796875
line_max: 154
alpha_frac: 0.692223
autogenerated: false
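The trailing numeric fields of each record (line_mean, line_max, alpha_frac) read as per-file statistics over the content string. Their exact definitions are not documented in this dump; the sketch below shows one plausible way such statistics could be computed and is an assumption, not the dataset's actual recipe.

```python
# Assumed definitions: mean/max line length and fraction of alphabetic characters.
def file_stats(content: str):
    lengths = [len(line) for line in content.splitlines()] or [0]
    line_mean = sum(lengths) / len(lengths)
    line_max = max(lengths)
    alpha_frac = sum(c.isalpha() for c in content) / len(content) if content else 0.0
    return line_mean, line_max, alpha_frac

# Example on a tiny two-line snippet.
print(file_stats("import os\nprint(os.name)"))
```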
repo_name: danielquinn/spirithunter
path: src/spirits/migrations/0002_auto_20160904_1741.py
copies: 1
size: 2320
content:
# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2016-09-04 17:41 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('items', '0002_auto_20160904_1741'), ('spirits', '0001_initial'), ('aspects', '0001_initial'), ] operations = [ migrations.AddField( model_name='spirit', name='owner', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='spirits', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='levelladder', name='family', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ladder', to='spirits.Family'), ), migrations.AddField( model_name='levelladder', name='item_drop_common', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='common_drops', to='items.Item'), ), migrations.AddField( model_name='levelladder', name='item_drop_rare', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rare_drops', to='items.Item'), ), migrations.AddField( model_name='elementalstrength', name='element', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='applications', to='aspects.Element'), ), migrations.AddField( model_name='elementalstrength', name='spirit', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='elemental_strengths', to='spirits.Spirit'), ), migrations.AlterUniqueTogether( name='levelladder', unique_together=set([('level', 'family')]), ), migrations.AlterUniqueTogether( name='elementalstrength', unique_together=set([('spirit', 'element')]), ), ]
license: agpl-3.0
hash: 4,100,323,036,932,990,500
line_mean: 37.666667
line_max: 157
alpha_frac: 0.613362
autogenerated: false
repo_name: fedora-infra/fmn.consumer
path: tests/test_sse_backend.py
copies: 2
size: 5500
content:
""" Unit tests for the SSE backend. """ from __future__ import unicode_literals, absolute_import import json import unittest import mock from fmn.consumer.backends import SSEBackend @mock.patch('fmn.consumer.backends.sse.protocol.ClientCreator', mock.Mock()) class TestSSEBackend(unittest.TestCase): def test_format_message_conglomerated(self): """Assert conglomerated messages are formatted""" message = { 'subtitle': 'relrod pushed commits to ghc and 487 other packages', 'link': 'http://example.com/', 'icon': 'https://that-git-logo', 'secondary_icon': 'https://that-relrod-avatar', 'start_time': 0, 'end_time': 100, 'human_time': '5 minutes ago', 'usernames': ['relrod'], 'packages': ['ghc', 'nethack'], 'topics': ['org.fedoraproject.prod.git.receive'], 'categories': ['git'], 'msg_ids': { '2014-abcde': { 'subtitle': 'relrod pushed some commits to ghc', 'title': 'git.receive', 'link': 'http://...', 'icon': 'http://...', }, '2014-bcdef': { 'subtitle': 'relrod pushed some commits to nethack', 'title': 'git.receive', 'link': 'http://...', 'icon': 'http://...', }, }, } recipient = { "triggered_by_links": True, "markup_messages": True, "user": "jcline.id.fedoraproject.org", "filter_name": "firehose", "filter_oneshot": True, "filter_id": 7, "shorten_links": False, "verbose": True, } backend = SSEBackend({}) formatted_message = backend._format_message(message, recipient) self.assertTrue(isinstance(formatted_message, bytes)) formatted_message = json.loads(formatted_message) for key in ('dom_id', 'date_time', 'icon', 'link', 'markup', 'secondary_icon'): self.assertTrue(key in formatted_message) self.assertEqual(formatted_message['link'], message['link']) self.assertEqual(formatted_message['markup'], message['subtitle']) @mock.patch('fmn.consumer.backends.sse.fedmsg.meta') def test_format_message_raw(self, mock_meta): """Assert raw messages are formatted""" message = { u'username': u'apache', u'i': 1, u'timestamp': 1478281861, u'msg_id': u'2016-c2184569-f9c4-4c52-affd-79e28848d70f', u'crypto': u'x509', u'topic': u'org.fedoraproject.prod.buildsys.task.state.change', u'msg': { u'info': { u'children': [], u'parent': None, u'channel_id': 1, u'start_time': u'2016-11-04 17:51:01.254871', u'request': [ u'../packages/eclipse/4.5.0/1.fc26/src/eclipse-4.5.0-1.fc26.src.rpm', u'f26', {u'scratch': True, u'arch_override': u'x86_64'} ], u'state': 1, u'awaited': None, u'method': u'build', u'priority': 50, u'completion_time': None, u'waiting': None, u'create_time': u'2016-11-04 17:50:57.825631', u'owner': 3199, u'host_id': 82, u'label': None, u'arch': u'noarch', u'id': 16289846 }, u'old': u'FREE', u'attribute': u'state', u'method': u'build', u'instance': u'primary', u'owner': u'koschei', u'new': u'OPEN', u'srpm': u'eclipse-4.5.0-1.fc26.src.rpm', u'id': 16289846 } } recipient = { "triggered_by_links": True, "markup_messages": True, "user": "jcline.id.fedoraproject.org", "filter_name": "firehose", "filter_oneshot": True, "filter_id": 7, "shorten_links": False, "verbose": True, } mock_meta.msg2icon.return_value = 'http://example.com/icon.png' mock_meta.msg2link.return_value = 'http://example.com/link' mock_meta.msg2secondary_icon.return_value = None mock_meta.msg2agent.return_value = 'koschei' mock_meta.msg2title.return_value = 'Some title' mock_meta.msg2subtitle.return_value = 'Some subtitle' backend = SSEBackend({}) formatted_message = backend._format_message(message, recipient) self.assertTrue(isinstance(formatted_message, bytes)) formatted_message = json.loads(formatted_message) for key in ('dom_id', 'date_time', 'icon', 
'link', 'markup', 'secondary_icon'): self.assertTrue(key in formatted_message) self.assertEqual(mock_meta.msg2icon.return_value, formatted_message['icon']) self.assertEqual(mock_meta.msg2link.return_value, formatted_message['link']) self.assertEqual( mock_meta.msg2secondary_icon.return_value, formatted_message['secondary_icon']) if __name__ == '__main__': unittest.main()
license: gpl-2.0
hash: 1,209,929,337,784,353,500
line_mean: 38.007092
line_max: 93
alpha_frac: 0.502182
autogenerated: false
repo_name: mcrowson/django
path: django/utils/functional.py
copies: 234
size: 13622
content:
import copy import operator from functools import total_ordering, wraps from django.utils import six from django.utils.six.moves import copyreg # You can't trivially replace this with `functools.partial` because this binds # to classes and returns bound instances, whereas functools.partial (on # CPython) is a type and its instances don't bind. def curry(_curried_func, *args, **kwargs): def _curried(*moreargs, **morekwargs): return _curried_func(*(args + moreargs), **dict(kwargs, **morekwargs)) return _curried class cached_property(object): """ Decorator that converts a method with a single self argument into a property cached on the instance. Optional ``name`` argument allows you to make cached properties of other methods. (e.g. url = cached_property(get_absolute_url, name='url') ) """ def __init__(self, func, name=None): self.func = func self.__doc__ = getattr(func, '__doc__') self.name = name or func.__name__ def __get__(self, instance, type=None): if instance is None: return self res = instance.__dict__[self.name] = self.func(instance) return res class Promise(object): """ This is just a base class for the proxy class created in the closure of the lazy function. It can be used to recognize promises in code. """ pass def lazy(func, *resultclasses): """ Turns any callable into a lazy evaluated callable. You need to give result classes or types -- at least one is needed so that the automatic forcing of the lazy evaluation code is triggered. Results are not memoized; the function is evaluated on every access. """ @total_ordering class __proxy__(Promise): """ Encapsulate a function call and act as a proxy for methods that are called on the result of that function. The function is not evaluated until one of the methods on the result is called. """ __prepared = False def __init__(self, args, kw): self.__args = args self.__kw = kw if not self.__prepared: self.__prepare_class__() self.__prepared = True def __reduce__(self): return ( _lazy_proxy_unpickle, (func, self.__args, self.__kw) + resultclasses ) @classmethod def __prepare_class__(cls): for resultclass in resultclasses: for type_ in resultclass.mro(): for method_name in type_.__dict__.keys(): # All __promise__ return the same wrapper method, they # look up the correct implementation when called. if hasattr(cls, method_name): continue meth = cls.__promise__(method_name) setattr(cls, method_name, meth) cls._delegate_bytes = bytes in resultclasses cls._delegate_text = six.text_type in resultclasses assert not (cls._delegate_bytes and cls._delegate_text), ( "Cannot call lazy() with both bytes and text return types.") if cls._delegate_text: if six.PY3: cls.__str__ = cls.__text_cast else: cls.__unicode__ = cls.__text_cast cls.__str__ = cls.__bytes_cast_encoded elif cls._delegate_bytes: if six.PY3: cls.__bytes__ = cls.__bytes_cast else: cls.__str__ = cls.__bytes_cast @classmethod def __promise__(cls, method_name): # Builds a wrapper around some magic method def __wrapper__(self, *args, **kw): # Automatically triggers the evaluation of a lazy value and # applies the given magic method of the result type. 
res = func(*self.__args, **self.__kw) return getattr(res, method_name)(*args, **kw) return __wrapper__ def __text_cast(self): return func(*self.__args, **self.__kw) def __bytes_cast(self): return bytes(func(*self.__args, **self.__kw)) def __bytes_cast_encoded(self): return func(*self.__args, **self.__kw).encode('utf-8') def __cast(self): if self._delegate_bytes: return self.__bytes_cast() elif self._delegate_text: return self.__text_cast() else: return func(*self.__args, **self.__kw) def __str__(self): # object defines __str__(), so __prepare_class__() won't overload # a __str__() method from the proxied class. return str(self.__cast()) def __ne__(self, other): if isinstance(other, Promise): other = other.__cast() return self.__cast() != other def __eq__(self, other): if isinstance(other, Promise): other = other.__cast() return self.__cast() == other def __lt__(self, other): if isinstance(other, Promise): other = other.__cast() return self.__cast() < other def __hash__(self): return hash(self.__cast()) def __mod__(self, rhs): if self._delegate_bytes and six.PY2: return bytes(self) % rhs elif self._delegate_text: return six.text_type(self) % rhs return self.__cast() % rhs def __deepcopy__(self, memo): # Instances of this class are effectively immutable. It's just a # collection of functions. So we don't need to do anything # complicated for copying. memo[id(self)] = self return self @wraps(func) def __wrapper__(*args, **kw): # Creates the proxy object, instead of the actual value. return __proxy__(args, kw) return __wrapper__ def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses): return lazy(func, *resultclasses)(*args, **kwargs) def allow_lazy(func, *resultclasses): """ A decorator that allows a function to be called with one or more lazy arguments. If none of the args are lazy, the function is evaluated immediately, otherwise a __proxy__ is returned that will evaluate the function when needed. """ lazy_func = lazy(func, *resultclasses) @wraps(func) def wrapper(*args, **kwargs): for arg in list(args) + list(kwargs.values()): if isinstance(arg, Promise): break else: return func(*args, **kwargs) return lazy_func(*args, **kwargs) return wrapper empty = object() def new_method_proxy(func): def inner(self, *args): if self._wrapped is empty: self._setup() return func(self._wrapped, *args) return inner class LazyObject(object): """ A wrapper for another class that can be used to delay instantiation of the wrapped class. By subclassing, you have the opportunity to intercept and alter the instantiation. If you don't need to do that, use SimpleLazyObject. """ # Avoid infinite recursion when tracing __init__ (#19456). _wrapped = None def __init__(self): self._wrapped = empty __getattr__ = new_method_proxy(getattr) def __setattr__(self, name, value): if name == "_wrapped": # Assign to __dict__ to avoid infinite __setattr__ loops. self.__dict__["_wrapped"] = value else: if self._wrapped is empty: self._setup() setattr(self._wrapped, name, value) def __delattr__(self, name): if name == "_wrapped": raise TypeError("can't delete _wrapped.") if self._wrapped is empty: self._setup() delattr(self._wrapped, name) def _setup(self): """ Must be implemented by subclasses to initialize the wrapped object. """ raise NotImplementedError('subclasses of LazyObject must provide a _setup() method') # Because we have messed with __class__ below, we confuse pickle as to what # class we are pickling. It also appears to stop __reduce__ from being # called. 
So, we define __getstate__ in a way that cooperates with the way # that pickle interprets this class. This fails when the wrapped class is # a builtin, but it is better than nothing. def __getstate__(self): if self._wrapped is empty: self._setup() return self._wrapped.__dict__ # Python 3 will call __reduce__ when pickling; this method is needed # to serialize and deserialize correctly. @classmethod def __newobj__(cls, *args): return cls.__new__(cls, *args) def __reduce_ex__(self, proto): if proto >= 2: # On Py3, since the default protocol is 3, pickle uses the # ``__newobj__`` method (& more efficient opcodes) for writing. return (self.__newobj__, (self.__class__,), self.__getstate__()) else: # On Py2, the default protocol is 0 (for back-compat) & the above # code fails miserably (see regression test). Instead, we return # exactly what's returned if there's no ``__reduce__`` method at # all. return (copyreg._reconstructor, (self.__class__, object, None), self.__getstate__()) def __deepcopy__(self, memo): if self._wrapped is empty: # We have to use type(self), not self.__class__, because the # latter is proxied. result = type(self)() memo[id(self)] = result return result return copy.deepcopy(self._wrapped, memo) if six.PY3: __bytes__ = new_method_proxy(bytes) __str__ = new_method_proxy(str) __bool__ = new_method_proxy(bool) else: __str__ = new_method_proxy(str) __unicode__ = new_method_proxy(unicode) # NOQA: unicode undefined on PY3 __nonzero__ = new_method_proxy(bool) # Introspection support __dir__ = new_method_proxy(dir) # Need to pretend to be the wrapped class, for the sake of objects that # care about this (especially in equality tests) __class__ = property(new_method_proxy(operator.attrgetter("__class__"))) __eq__ = new_method_proxy(operator.eq) __ne__ = new_method_proxy(operator.ne) __hash__ = new_method_proxy(hash) # List/Tuple/Dictionary methods support __getitem__ = new_method_proxy(operator.getitem) __setitem__ = new_method_proxy(operator.setitem) __delitem__ = new_method_proxy(operator.delitem) __iter__ = new_method_proxy(iter) __len__ = new_method_proxy(len) __contains__ = new_method_proxy(operator.contains) # Workaround for http://bugs.python.org/issue12370 _super = super class SimpleLazyObject(LazyObject): """ A lazy object initialized from any function. Designed for compound objects of unknown type. For builtins or objects of known type, use django.utils.functional.lazy. """ def __init__(self, func): """ Pass in a callable that returns the object to be wrapped. If copies are made of the resulting SimpleLazyObject, which can happen in various circumstances within Django, then you must ensure that the callable can be safely run more than once and will return the same value. """ self.__dict__['_setupfunc'] = func _super(SimpleLazyObject, self).__init__() def _setup(self): self._wrapped = self._setupfunc() # Return a meaningful representation of the lazy object for debugging # without evaluating the wrapped object. def __repr__(self): if self._wrapped is empty: repr_attr = self._setupfunc else: repr_attr = self._wrapped return '<%s: %r>' % (type(self).__name__, repr_attr) def __deepcopy__(self, memo): if self._wrapped is empty: # We have to use SimpleLazyObject, not self.__class__, because the # latter is proxied. result = SimpleLazyObject(self._setupfunc) memo[id(self)] = result return result return copy.deepcopy(self._wrapped, memo) class lazy_property(property): """ A property that works with subclasses by wrapping the decorated functions of the base class. 
""" def __new__(cls, fget=None, fset=None, fdel=None, doc=None): if fget is not None: @wraps(fget) def fget(instance, instance_type=None, name=fget.__name__): return getattr(instance, name)() if fset is not None: @wraps(fset) def fset(instance, value, name=fset.__name__): return getattr(instance, name)(value) if fdel is not None: @wraps(fdel) def fdel(instance, name=fdel.__name__): return getattr(instance, name)() return property(fget, fset, fdel, doc) def partition(predicate, values): """ Splits the values into two sets, based on the return value of the function (True/False). e.g.: >>> partition(lambda x: x > 3, range(5)) [0, 1, 2, 3], [4] """ results = ([], []) for item in values: results[predicate(item)].append(item) return results
license: bsd-3-clause
hash: 3,670,553,300,481,786,000
line_mean: 33.838875
line_max: 96
alpha_frac: 0.571942
autogenerated: false
repo_name: rwatson/chromium-capsicum
path: third_party/scons/scons-local/SCons/Tool/cvf.py
copies: 3
size: 2399
content:
"""engine.SCons.Tool.cvf Tool-specific initialization for the Compaq Visual Fortran compiler. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/cvf.py 3897 2009/01/13 06:45:54 scons" import fortran compilers = ['f90'] def generate(env): """Add Builders and construction variables for compaq visual fortran to an Environment.""" fortran.generate(env) env['FORTRAN'] = 'f90' env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['OBJSUFFIX'] = '.obj' env['FORTRANMODDIR'] = '${TARGET.dir}' env['FORTRANMODDIRPREFIX'] = '/module:' env['FORTRANMODDIRSUFFIX'] = '' def exists(env): return env.Detect(compilers)
license: bsd-3-clause
hash: -2,009,289,899,430,156,000
line_mean: 45.134615
line_max: 174
alpha_frac: 0.73489
autogenerated: false
repo_name: inveniosoftware/kwalitee
path: kwalitee/cli/githooks.py
copies: 2
size: 2832
content:
# -*- coding: utf-8 -*- # # This file is part of kwalitee # Copyright (C) 2014, 2015 CERN. # # kwalitee is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # kwalitee is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with kwalitee; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. # # In applying this licence, CERN does not waive the privileges and immunities # granted to it by virtue of its status as an Intergovernmental Organization # or submit itself to any jurisdiction. """Command-line tools for the git hooks.""" from __future__ import absolute_import, print_function import os import sys import click from ..hooks import run HOOKS = { "pre-commit", "prepare-commit-msg", "post-commit", } HOOK_PATH = os.path.join(".git", "hooks") @click.group() def githooks(): """Install githooks for kwalitee checks.""" @githooks.command() @click.option("-f", "--force", is_flag=True, help="Overwrite existing hooks", default=False) def install(force=False): """Install git hooks.""" ret, git_dir, _ = run("git rev-parse --show-toplevel") if ret != 0: click.echo( "ERROR: Please run from within a GIT repository.", file=sys.stderr) raise click.Abort git_dir = git_dir[0] hooks_dir = os.path.join(git_dir, HOOK_PATH) for hook in HOOKS: hook_path = os.path.join(hooks_dir, hook) if os.path.exists(hook_path): if not force: click.echo( "Hook already exists. Skipping {0}".format(hook_path), file=sys.stderr) continue else: os.unlink(hook_path) source = os.path.join(sys.prefix, "bin", "kwalitee-" + hook) os.symlink(os.path.normpath(source), hook_path) return True @githooks.command() def uninstall(): """Uninstall git hooks.""" ret, git_dir, _ = run("git rev-parse --show-toplevel") if ret != 0: click.echo( "ERROR: Please run from within a GIT repository.", file=sys.stderr) raise click.Abort git_dir = git_dir[0] hooks_dir = os.path.join(git_dir, HOOK_PATH) for hook in HOOKS: hook_path = os.path.join(hooks_dir, hook) if os.path.exists(hook_path): os.remove(hook_path) return True
license: gpl-2.0
hash: 3,105,231,613,633,305,600
line_mean: 28.5
line_max: 77
alpha_frac: 0.637712
autogenerated: false
repo_name: xaviercobain88/framework-python
path: openerp/addons/account/account_analytic_line.py
copies: 31
size: 7587
content:
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields from openerp.osv import osv from openerp.tools.translate import _ class account_analytic_line(osv.osv): _inherit = 'account.analytic.line' _description = 'Analytic Line' _columns = { 'product_uom_id': fields.many2one('product.uom', 'Unit of Measure'), 'product_id': fields.many2one('product.product', 'Product'), 'general_account_id': fields.many2one('account.account', 'General Account', required=True, ondelete='restrict'), 'move_id': fields.many2one('account.move.line', 'Move Line', ondelete='cascade', select=True), 'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal', required=True, ondelete='restrict', select=True), 'code': fields.char('Code', size=8), 'ref': fields.char('Ref.', size=64), 'currency_id': fields.related('move_id', 'currency_id', type='many2one', relation='res.currency', string='Account Currency', store=True, help="The related account currency if not equal to the company one.", readonly=True), 'amount_currency': fields.related('move_id', 'amount_currency', type='float', string='Amount Currency', store=True, help="The amount expressed in the related account currency if not equal to the company one.", readonly=True), } _defaults = { 'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.analytic.line', context=c), } _order = 'date desc' def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): if context is None: context = {} if context.get('from_date',False): args.append(['date', '>=', context['from_date']]) if context.get('to_date',False): args.append(['date','<=', context['to_date']]) return super(account_analytic_line, self).search(cr, uid, args, offset, limit, order, context=context, count=count) def _check_company(self, cr, uid, ids, context=None): lines = self.browse(cr, uid, ids, context=context) for l in lines: if l.move_id and not l.account_id.company_id.id == l.move_id.account_id.company_id.id: return False return True # Compute the cost based on the price type define into company # property_valuation_price_type property def on_change_unit_amount(self, cr, uid, id, prod_id, quantity, company_id, unit=False, journal_id=False, context=None): if context==None: context={} if not journal_id: j_ids = self.pool.get('account.analytic.journal').search(cr, uid, [('type','=','purchase')]) journal_id = j_ids and j_ids[0] or False if not journal_id or not prod_id: return {} product_obj = self.pool.get('product.product') analytic_journal_obj =self.pool.get('account.analytic.journal') product_price_type_obj = 
self.pool.get('product.price.type') j_id = analytic_journal_obj.browse(cr, uid, journal_id, context=context) prod = product_obj.browse(cr, uid, prod_id, context=context) result = 0.0 if prod_id: unit = prod.uom_id.id if j_id.type == 'purchase': unit = prod.uom_po_id.id if j_id.type <> 'sale': a = prod.property_account_expense.id if not a: a = prod.categ_id.property_account_expense_categ.id if not a: raise osv.except_osv(_('Error!'), _('There is no expense account defined ' \ 'for this product: "%s" (id:%d).') % \ (prod.name, prod.id,)) else: a = prod.property_account_income.id if not a: a = prod.categ_id.property_account_income_categ.id if not a: raise osv.except_osv(_('Error!'), _('There is no income account defined ' \ 'for this product: "%s" (id:%d).') % \ (prod.name, prod_id,)) flag = False # Compute based on pricetype product_price_type_ids = product_price_type_obj.search(cr, uid, [('field','=','standard_price')], context=context) pricetype = product_price_type_obj.browse(cr, uid, product_price_type_ids, context=context)[0] if journal_id: journal = analytic_journal_obj.browse(cr, uid, journal_id, context=context) if journal.type == 'sale': product_price_type_ids = product_price_type_obj.search(cr, uid, [('field','=','list_price')], context=context) if product_price_type_ids: pricetype = product_price_type_obj.browse(cr, uid, product_price_type_ids, context=context)[0] # Take the company currency as the reference one if pricetype.field == 'list_price': flag = True ctx = context.copy() if unit: # price_get() will respect a 'uom' in its context, in order # to return a default price for those units ctx['uom'] = unit amount_unit = prod.price_get(pricetype.field, context=ctx)[prod.id] prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account') amount = amount_unit * quantity or 0.0 result = round(amount, prec) if not flag: result *= -1 return {'value': { 'amount': result, 'general_account_id': a, 'product_uom_id': unit } } def view_header_get(self, cr, user, view_id, view_type, context=None): if context is None: context = {} if context.get('account_id', False): # account_id in context may also be pointing to an account.account.id cr.execute('select name from account_analytic_account where id=%s', (context['account_id'],)) res = cr.fetchone() if res: res = _('Entries: ')+ (res[0] or '') return res return False account_analytic_line() class res_partner(osv.osv): """ Inherits partner and adds contract information in the partner form """ _inherit = 'res.partner' _columns = { 'contract_ids': fields.one2many('account.analytic.account', \ 'partner_id', 'Contracts', readonly=True), } res_partner() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
license: agpl-3.0
hash: -5,107,192,865,294,292,000
line_mean: 46.716981
line_max: 233
alpha_frac: 0.580598
autogenerated: false
repo_name: adamschmideg/mal
path: rpython/step2_eval.py
copies: 32
size: 3030
content:
#import sys, traceback import mal_readline import mal_types as types from mal_types import (MalSym, MalInt, MalStr, _keywordu, MalList, _list, MalVector, MalHashMap, MalFunc) import reader, printer # read def READ(str): return reader.read_str(str) # eval def eval_ast(ast, env): if types._symbol_Q(ast): assert isinstance(ast, MalSym) if ast.value in env: return env[ast.value] else: raise Exception(u"'" + ast.value + u"' not found") elif types._list_Q(ast): res = [] for a in ast.values: res.append(EVAL(a, env)) return MalList(res) elif types._vector_Q(ast): res = [] for a in ast.values: res.append(EVAL(a, env)) return MalVector(res) elif types._hash_map_Q(ast): new_dct = {} for k in ast.dct.keys(): new_dct[k] = EVAL(ast.dct[k], env) return MalHashMap(new_dct) else: return ast # primitive value, return unchanged def EVAL(ast, env): #print("EVAL %s" % printer._pr_str(ast)) if not types._list_Q(ast): return eval_ast(ast, env) # apply list el = eval_ast(ast, env) f = el.values[0] if isinstance(f, MalFunc): return f.apply(el.values[1:]) else: raise Exception("%s is not callable" % f) # print def PRINT(exp): return printer._pr_str(exp) # repl repl_env = {} def REP(str, env): return PRINT(EVAL(READ(str), env)) def plus(args): a, b = args[0], args[1] assert isinstance(a, MalInt) assert isinstance(b, MalInt) return MalInt(a.value+b.value) def minus(args): a, b = args[0], args[1] assert isinstance(a, MalInt) assert isinstance(b, MalInt) return MalInt(a.value-b.value) def multiply(args): a, b = args[0], args[1] assert isinstance(a, MalInt) assert isinstance(b, MalInt) return MalInt(a.value*b.value) def divide(args): a, b = args[0], args[1] assert isinstance(a, MalInt) assert isinstance(b, MalInt) return MalInt(int(a.value/b.value)) repl_env[u'+'] = MalFunc(plus) repl_env[u'-'] = MalFunc(minus) repl_env[u'*'] = MalFunc(multiply) repl_env[u'/'] = MalFunc(divide) def entry_point(argv): while True: try: line = mal_readline.readline("user> ") if line == "": continue print(REP(line, repl_env)) except EOFError as e: break except reader.Blank: continue except types.MalException as e: print(u"Error: %s" % printer._pr_str(e.object, False)) except Exception as e: print("Error: %s" % e) #print("".join(traceback.format_exception(*sys.exc_info()))) return 0 # _____ Define and setup target ___ def target(*args): return entry_point # Just run entry_point if not RPython compilation import sys if not sys.argv[0].endswith('rpython'): entry_point(sys.argv)
license: mpl-2.0
hash: -4,262,228,240,801,030,700
line_mean: 26.545455
line_max: 72
alpha_frac: 0.575248
autogenerated: false
repo_name: angryrancor/kivy
path: kivy/core/clipboard/clipboard_pygame.py
copies: 39
size: 1446
content:
''' Clipboard Pygame: an implementation of the Clipboard using pygame.scrap. ''' __all__ = ('ClipboardPygame', ) from kivy.utils import platform from kivy.compat import PY2 from kivy.core.clipboard import ClipboardBase if platform not in ('win', 'linux', 'macosx'): raise SystemError('unsupported platform for pygame clipboard') try: import pygame import pygame.scrap except: raise class ClipboardPygame(ClipboardBase): _is_init = False _types = None _aliases = { 'text/plain;charset=utf-8': 'UTF8_STRING' } def init(self): if ClipboardPygame._is_init: return pygame.scrap.init() ClipboardPygame._is_init = True def get(self, mimetype='text/plain'): self.init() mimetype = self._aliases.get(mimetype, mimetype) text = pygame.scrap.get(mimetype) return text def put(self, data, mimetype='text/plain'): self.init() mimetype = self._aliases.get(mimetype, mimetype) pygame.scrap.put(mimetype, data) def get_types(self): if not self._types: self.init() types = pygame.scrap.get_types() for mime, pygtype in self._aliases.items()[:]: if mime in types: del self._aliases[mime] if pygtype in types: types.append(mime) self._types = types return self._types
license: mit
hash: 7,406,963,261,212,281,000
line_mean: 23.931034
line_max: 72
alpha_frac: 0.594744
autogenerated: false
repo_name: arista-eosplus/ansible
path: lib/ansible/modules/network/panos/panos_security_policy.py
copies: 39
size: 16244
content:
#!/usr/bin/python # -*- coding: utf-8 -*- # # Ansible module to manage PaloAltoNetworks Firewall # (c) 2016, techbizdev <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: panos_security_policy short_description: Create security rule policy on PanOS devices. description: - Security policies allow you to enforce rules and take action, and can be as general or specific as needed. The policy rules are compared against the incoming traffic in sequence, and because the first rule that matches the traffic is applied, the more specific rules must precede the more general ones. author: "Ivan Bojer (@ivanbojer)" version_added: "2.3" requirements: - pan-python can be obtained from PyPi U(https://pypi.python.org/pypi/pan-python) - pandevice can be obtained from PyPi U(https://pypi.python.org/pypi/pandevice) notes: - Checkmode is not supported. - Panorama is supported options: ip_address: description: - IP address (or hostname) of PAN-OS device being configured. required: true username: description: - Username credentials to use for auth unless I(api_key) is set. default: "admin" password: description: - Password credentials to use for auth unless I(api_key) is set. required: true api_key: description: - API key that can be used instead of I(username)/I(password) credentials. rule_name: description: - Name of the security rule. required: true rule_type: description: - Type of security rule (version 6.1 of PanOS and above). default: "universal" description: description: - Description for the security rule. default: "None" tag: description: - Administrative tags that can be added to the rule. Note, tags must be already defined. default: "None" from_zone: description: - List of source zones. default: "any" to_zone: description: - List of destination zones. default: "any" source: description: - List of source addresses. default: "any" source_user: description: - Use users to enforce policy for individual users or a group of users. default: "any" hip_profiles: description: > If you are using GlobalProtect with host information profile (HIP) enabled, you can also base the policy on information collected by GlobalProtect. For example, the user access level can be determined HIP that notifies the firewall about the user's local configuration. default: "any" destination: description: - List of destination addresses. default: "any" application: description: - List of applications. default: "any" service: description: - List of services. default: "application-default" log_start: description: - Whether to log at session start. default: false log_end: description: - Whether to log at session end. default: true action: description: - Action to apply once rules maches. default: "allow" group_profile: description: > Security profile group that is already defined in the system. 
This property supersedes antivirus, vulnerability, spyware, url_filtering, file_blocking, data_filtering, and wildfire_analysis properties. default: None antivirus: description: - Name of the already defined antivirus profile. default: None vulnerability: description: - Name of the already defined vulnerability profile. default: None spyware: description: - Name of the already defined spyware profile. default: None url_filtering: description: - Name of the already defined url_filtering profile. default: None file_blocking: description: - Name of the already defined file_blocking profile. default: None data_filtering: description: - Name of the already defined data_filtering profile. default: None wildfire_analysis: description: - Name of the already defined wildfire_analysis profile. default: None devicegroup: description: > Device groups are used for the Panorama interaction with Firewall(s). The group must exists on Panorama. If device group is not define we assume that we are contacting Firewall. default: None commit: description: - Commit configuration if changed. default: true ''' EXAMPLES = ''' - name: permit ssh to 1.1.1.1 panos_security_policy: ip_address: '10.5.172.91' username: 'admin' password: 'paloalto' rule_name: 'SSH permit' description: 'SSH rule test' from_zone: ['public'] to_zone: ['private'] source: ['any'] source_user: ['any'] destination: ['1.1.1.1'] category: ['any'] application: ['ssh'] service: ['application-default'] hip_profiles: ['any'] action: 'allow' commit: false - name: Allow HTTP multimedia only from CDNs panos_security_policy: ip_address: '10.5.172.91' username: 'admin' password: 'paloalto' rule_name: 'HTTP Multimedia' description: 'Allow HTTP multimedia only to host at 1.1.1.1' from_zone: ['public'] to_zone: ['private'] source: ['any'] source_user: ['any'] destination: ['1.1.1.1'] category: ['content-delivery-networks'] application: ['http-video', 'http-audio'] service: ['service-http', 'service-https'] hip_profiles: ['any'] action: 'allow' commit: false - name: more complex fictitious rule that uses profiles panos_security_policy: ip_address: '10.5.172.91' username: 'admin' password: 'paloalto' rule_name: 'Allow HTTP w profile' log_start: false log_end: true action: 'allow' antivirus: 'default' vulnerability: 'default' spyware: 'default' url_filtering: 'default' wildfire_analysis: 'default' commit: false - name: deny all panos_security_policy: ip_address: '10.5.172.91' username: 'admin' password: 'paloalto' rule_name: 'DenyAll' log_start: true log_end: true action: 'deny' rule_type: 'interzone' commit: false # permit ssh to 1.1.1.1 using panorama and pushing the configuration to firewalls # that are defined in 'DeviceGroupA' device group - name: permit ssh to 1.1.1.1 through Panorama panos_security_policy: ip_address: '10.5.172.92' password: 'paloalto' rule_name: 'SSH permit' description: 'SSH rule test' from_zone: ['public'] to_zone: ['private'] source: ['any'] source_user: ['any'] destination: ['1.1.1.1'] category: ['any'] application: ['ssh'] service: ['application-default'] hip_profiles: ['any'] action: 'allow' devicegroup: 'DeviceGroupA' ''' RETURN = ''' # Default return values ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import get_exception try: import pan.xapi from pan.xapi import PanXapiError import pandevice import pandevice.firewall import pandevice.panorama import pandevice.objects import pandevice.policies HAS_LIB = True except ImportError: HAS_LIB = False def security_rule_exists(device, 
rule_name): if isinstance(device, pandevice.firewall.Firewall): rule_base = pandevice.policies.Rulebase.refreshall(device) elif isinstance(device, pandevice.panorama.Panorama): # look for only pre-rulebase ATM rule_base = pandevice.policies.PreRulebase.refreshall(device) if rule_base: rule_base = rule_base[0] security_rules = rule_base.findall(pandevice.policies.SecurityRule) if security_rules: for r in security_rules: if r.name == rule_name: return True return False def create_security_rule(**kwargs): security_rule = pandevice.policies.SecurityRule( name=kwargs['rule_name'], description=kwargs['description'], tozone=kwargs['to_zone'], fromzone=kwargs['from_zone'], source=kwargs['source'], source_user=kwargs['source_user'], destination=kwargs['destination'], category=kwargs['category'], application=kwargs['application'], service=kwargs['service'], hip_profiles=kwargs['hip_profiles'], log_start=kwargs['log_start'], log_end=kwargs['log_end'], type=kwargs['rule_type'], action=kwargs['action']) if 'tag' in kwargs: security_rule.tag = kwargs['tag'] # profile settings if 'group_profile' in kwargs: security_rule.group = kwargs['group_profile'] else: if 'antivirus' in kwargs: security_rule.virus = kwargs['antivirus'] if 'vulnerability' in kwargs: security_rule.vulnerability = kwargs['vulnerability'] if 'spyware' in kwargs: security_rule.spyware = kwargs['spyware'] if 'url_filtering' in kwargs: security_rule.url_filtering = kwargs['url_filtering'] if 'file_blocking' in kwargs: security_rule.file_blocking = kwargs['file_blocking'] if 'data_filtering' in kwargs: security_rule.data_filtering = kwargs['data_filtering'] if 'wildfire_analysis' in kwargs: security_rule.wildfire_analysis = kwargs['wildfire_analysis'] return security_rule def add_security_rule(device, sec_rule): if isinstance(device, pandevice.firewall.Firewall): rule_base = pandevice.policies.Rulebase.refreshall(device) elif isinstance(device, pandevice.panorama.Panorama): # look for only pre-rulebase ATM rule_base = pandevice.policies.PreRulebase.refreshall(device) if rule_base: rule_base = rule_base[0] rule_base.add(sec_rule) sec_rule.create() return True else: return False def _commit(device, device_group=None): """ :param device: either firewall or panorama :param device_group: panorama device group or if none then 'all' :return: True if successful """ result = device.commit(sync=True) if isinstance(device, pandevice.panorama.Panorama): result = device.commit_all(sync=True, sync_all=True, devicegroup=device_group) return result def main(): argument_spec = dict( ip_address=dict(required=True), password=dict(no_log=True), username=dict(default='admin'), api_key=dict(no_log=True), rule_name=dict(required=True), description=dict(default=''), tag=dict(), to_zone=dict(type='list', default=['any']), from_zone=dict(type='list', default=['any']), source=dict(type='list', default=["any"]), source_user=dict(type='list', default=['any']), destination=dict(type='list', default=["any"]), category=dict(type='list', default=['any']), application=dict(type='list', default=['any']), service=dict(type='list', default=['application-default']), hip_profiles=dict(type='list', default=['any']), group_profile=dict(), antivirus=dict(), vulnerability=dict(), spyware=dict(), url_filtering=dict(), file_blocking=dict(), data_filtering=dict(), wildfire_analysis=dict(), log_start=dict(type='bool', default=False), log_end=dict(type='bool', default=True), rule_type=dict(default='universal'), action=dict(default='allow'), devicegroup=dict(), commit=dict(type='bool', 
default=True) ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, required_one_of=[['api_key', 'password']]) if not HAS_LIB: module.fail_json(msg='Missing required pan-python and pandevice modules.') ip_address = module.params["ip_address"] password = module.params["password"] username = module.params['username'] api_key = module.params['api_key'] rule_name = module.params['rule_name'] description = module.params['description'] tag = module.params['tag'] from_zone = module.params['from_zone'] to_zone = module.params['to_zone'] source = module.params['source'] source_user = module.params['source_user'] destination = module.params['destination'] category = module.params['category'] application = module.params['application'] service = module.params['service'] hip_profiles = module.params['hip_profiles'] log_start = module.params['log_start'] log_end = module.params['log_end'] rule_type = module.params['rule_type'] action = module.params['action'] group_profile = module.params['group_profile'] antivirus = module.params['antivirus'] vulnerability = module.params['vulnerability'] spyware = module.params['spyware'] url_filtering = module.params['url_filtering'] file_blocking = module.params['file_blocking'] data_filtering = module.params['data_filtering'] wildfire_analysis = module.params['wildfire_analysis'] devicegroup = module.params['devicegroup'] commit = module.params['commit'] if devicegroup: device = pandevice.panorama.Panorama(ip_address, username, password, api_key=api_key) dev_grps = device.refresh_devices() for grp in dev_grps: if grp.name == devicegroup: break module.fail_json(msg=' \'%s\' device group not found in Panorama. Is the name correct?' % devicegroup) else: device = pandevice.firewall.Firewall(ip_address, username, password, api_key=api_key) if security_rule_exists(device, rule_name): module.fail_json(msg='Rule with the same name already exists.') try: sec_rule = create_security_rule( rule_name=rule_name, description=description, tag=tag, from_zone=from_zone, to_zone=to_zone, source=source, source_user=source_user, destination=destination, category=category, application=application, service=service, hip_profiles=hip_profiles, group_profile=group_profile, antivirus=antivirus, vulnerability=vulnerability, spyware=spyware, url_filtering=url_filtering, file_blocking=file_blocking, data_filtering=data_filtering, wildfire_analysis=wildfire_analysis, log_start=log_start, log_end=log_end, rule_type=rule_type, action=action ) changed = add_security_rule(device, sec_rule) except PanXapiError: exc = get_exception() module.fail_json(msg=exc.message) if changed and commit: result = _commit(device, devicegroup) module.exit_json(changed=changed, msg="okey dokey") if __name__ == '__main__': main()
license: gpl-3.0
hash: -9,135,327,369,128,480,000
line_mean: 31.816162
line_max: 116
alpha_frac: 0.62854
autogenerated: false
repo_name: amoikevin/gyp
path: test/win/gyptest-cl-enable-enhanced-instruction-set.py
copies: 52
size: 1432
content:
#!/usr/bin/env python # Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Test VCCLCompilerTool EnableEnhancedInstructionSet setting. """ import TestGyp import os import sys if sys.platform == 'win32': test = TestGyp.TestGyp() CHDIR = 'compiler-flags' test.run_gyp('enable-enhanced-instruction-set.gyp', chdir=CHDIR) test.build('enable-enhanced-instruction-set.gyp', test.ALL, chdir=CHDIR) test.run_built_executable('sse_extensions', chdir=CHDIR, stdout='/arch:SSE\n') test.run_built_executable('sse2_extensions', chdir=CHDIR, stdout='/arch:SSE2\n') # /arch:AVX introduced in VS2010, but MSBuild support lagged until 2012. if os.path.exists(test.built_file_path('avx_extensions')): test.run_built_executable('avx_extensions', chdir=CHDIR, stdout='/arch:AVX\n') # /arch:IA32 introduced in VS2012. if os.path.exists(test.built_file_path('no_extensions')): test.run_built_executable('no_extensions', chdir=CHDIR, stdout='/arch:IA32\n') # /arch:AVX2 introduced in VS2013r2. if os.path.exists(test.built_file_path('avx2_extensions')): test.run_built_executable('avx2_extensions', chdir=CHDIR, stdout='/arch:AVX2\n') test.pass_test()
license: bsd-3-clause
hash: 6,768,219,745,631,797,000
line_mean: 31.545455
line_max: 74
alpha_frac: 0.651536
autogenerated: false
repo_name: mila-iqia/babyai
path: scripts/il_perf.py
copies: 1
size: 2047
content:
#!/usr/bin/env python3 import argparse import pandas import os import json import re import numpy as np from scipy import stats from babyai import plotting as bp parser = argparse.ArgumentParser("Analyze performance of imitation learning") parser.add_argument("--path", default='.', help="path to model logs") parser.add_argument("--regex", default='.*', help="filter out some logs") parser.add_argument("--other", default=None, help="path to model logs for ttest comparison") parser.add_argument("--other_regex", default='.*', help="filter out some logs from comparison") parser.add_argument("--window", type=int, default=100, help="size of sliding window average, 10 for GoToRedBallGrey, 100 otherwise") args = parser.parse_args() def get_data(path, regex): df = pandas.concat(bp.load_logs(path), sort=True) fps = bp.get_fps(df) models = df['model'].unique() models = [model for model in df['model'].unique() if re.match(regex, model)] maxes = [] for model in models: df_model = df[df['model'] == model] success_rate = df_model['validation_success_rate'] success_rate = success_rate.rolling(args.window, center=True).mean() success_rate = max(success_rate[np.logical_not(np.isnan(success_rate))]) print(model, success_rate) maxes.append(success_rate) return np.array(maxes), fps if args.other is not None: print("is this architecture better") print(args.regex) maxes, fps = get_data(args.path, args.regex) result = {'samples': len(maxes), 'mean': maxes.mean(), 'std': maxes.std(), 'fps_mean': fps.mean(), 'fps_std': fps.std()} print(result) if args.other is not None: print("\nthan this one") maxes_ttest, fps = get_data(args.other, args.other_regex) result = {'samples': len(maxes_ttest), 'mean': maxes_ttest.mean(), 'std': maxes_ttest.std(), 'fps_mean': fps.mean(), 'fps_std': fps.std()} print(result) ttest = stats.ttest_ind(maxes, maxes_ttest, equal_var=False) print(f"\n{ttest}")
license: bsd-3-clause
hash: -5,121,100,608,454,657,000
line_mean: 32.557377
line_max: 81
alpha_frac: 0.66341
autogenerated: false
repo_name: boundlessgeo/QGIS
path: python/plugins/db_manager/db_plugins/postgis/plugins/versioning/__init__.py
copies: 32
size: 2259
content:
# -*- coding: utf-8 -*- """ /*************************************************************************** Name : Versioning plugin for DB Manager Description : Set up versioning support for a table Date : Mar 12, 2012 copyright : (C) 2012 by Giuseppe Sucameli email : [email protected] ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ """ from qgis.PyQt.QtCore import Qt from qgis.PyQt.QtWidgets import QAction, QApplication from qgis.PyQt.QtGui import QIcon # The load function is called when the "db" database or either one of its # children db objects (table o schema) is selected by the user. # @param db is the selected database # @param mainwindow is the DBManager mainwindow def load(db, mainwindow): # add the action to the DBManager menu action = QAction(QIcon(), QApplication.translate("DBManagerPlugin", "&Change Logging…"), db) mainwindow.registerAction(action, QApplication.translate("DBManagerPlugin", "&Table"), run) # The run function is called once the user clicks on the action TopoViewer # (look above at the load function) from the DBManager menu/toolbar. # @param item is the selected db item (either db, schema or table) # @param action is the clicked action on the DBManager menu/toolbar # @param mainwindow is the DBManager mainwindow def run(item, action, mainwindow): from .dlg_versioning import DlgVersioning dlg = DlgVersioning(item, mainwindow) QApplication.restoreOverrideCursor() try: dlg.exec_() finally: QApplication.setOverrideCursor(Qt.WaitCursor)
license: gpl-2.0
hash: 1,873,845,665,409,150,500
line_mean: 42.403846
line_max: 96
alpha_frac: 0.551617
autogenerated: false
repo_name: edum1978/eduengage
path: boilerplate/lib/basehandler.py
copies: 8
size: 12971
content:
# *-* coding: UTF-8 *-* # standard library imports import logging import re import traceback import sys # related third party imports import webapp2 from google.appengine.api.users import NotAllowedError from webapp2_extras import jinja2 from webapp2_extras import auth from webapp2_extras import sessions from google.appengine.api import taskqueue # local application/library specific imports from boilerplate import models from boilerplate.lib import utils, i18n from babel import Locale def user_required(handler): """ Decorator for checking if there's a user associated with the current session. Will also fail if there's no session present. """ def check_login(self, *args, **kwargs): """ If handler has no login_url specified invoke a 403 error """ try: auth = self.auth.get_user_by_session() if not auth: try: self.auth_config['login_url'] = self.uri_for('login', continue_url=self.request.path) self.redirect(self.auth_config['login_url'], abort=True) except (AttributeError, KeyError), e: self.abort(403) else: return handler(self, *args, **kwargs) except AttributeError, e: # avoid AttributeError when the session was delete from the server logging.error(e) self.auth.unset_session() self.redirect_to('home') return check_login def generate_csrf_token(): session = sessions.get_store().get_session() if '_csrf_token' not in session: session['_csrf_token'] = utils.random_string() return session['_csrf_token'] def jinja2_factory(app): j = jinja2.Jinja2(app) j.environment.filters.update({ # Set filters. # ... }) j.environment.globals.update({ # Set global variables. 'csrf_token' : generate_csrf_token, 'uri_for': webapp2.uri_for, 'getattr': getattr, 'str': str }) j.environment.tests.update({ # Set test. # ... }) return j def handle_error(request, response, exception): exc_type, exc_value, exc_tb = sys.exc_info() c = { 'exception': str(exception), 'url': request.url, } if request.app.config.get('send_mail_developer') is not False: # send email subject = "[{}] ERROR {}".format(request.app.config.get('environment').upper(), request.app.config.get('app_name')) lines = traceback.format_exception(exc_type, exc_value, exc_tb) message = '<strong>Type:</strong> ' + exc_type.__name__ + "<br />" + \ '<strong>Description:</strong> ' + c['exception'] + "<br />" + \ '<strong>URL:</strong> ' + c['url'] + "<br />" + \ '<strong>Traceback:</strong> <br />' + '<br />'.join(lines) email_body_path = "emails/error.txt" if c['exception'] is not 'Error saving Email Log in datastore': template_val = { "app_name" : request.app.config.get('app_name'), "message" : message, } email_body = jinja2.get_jinja2(factory=jinja2_factory, app=webapp2.get_app()).render_template(email_body_path, **template_val) email_url = webapp2.uri_for('taskqueue-send-email') for dev in request.app.config.get('developers'): taskqueue.add(url = email_url, params={ 'to': dev[1], 'subject' : subject, 'body' : email_body, 'sender' : request.app.config.get('contact_sender'), }) status_int = hasattr(exception, 'status_int') and exception.status_int or 500 template = request.app.config.get('error_templates')[status_int] t = jinja2.get_jinja2(factory=jinja2_factory, app=webapp2.get_app()).render_template(template, **c) logging.error(str(status_int) + " - " + str(exception)) response.write(t) response.set_status(status_int) class ViewClass: """ ViewClass to insert variables into the template. ViewClass is used in BaseHandler to promote variables automatically that can be used in jinja2 templates. 
Use case in a BaseHandler Class: self.view.var1 = "hello" self.view.array = [1, 2, 3] self.view.dict = dict(a="abc", b="bcd") Can be accessed in the template by just using the variables liek {{var1}} or {{dict.b}} """ pass class BaseHandler(webapp2.RequestHandler): """ BaseHandler for all requests Holds the auth and session properties so they are reachable for all requests """ def __init__(self, request, response): """ Override the initialiser in order to set the language. """ self.initialize(request, response) self.locale = i18n.set_locale(self) self.view = ViewClass() def dispatch(self): """ Get a session store for this request. """ self.session_store = sessions.get_store(request=self.request) try: # csrf protection if self.request.method == "POST" and not self.request.path.startswith('/taskqueue'): token = self.session.get('_csrf_token') if not token or token != self.request.get('_csrf_token'): self.abort(403) # Dispatch the request. webapp2.RequestHandler.dispatch(self) finally: # Save all sessions. self.session_store.save_sessions(self.response) @webapp2.cached_property def auth(self): return auth.get_auth() @webapp2.cached_property def session_store(self): return sessions.get_store(request=self.request) @webapp2.cached_property def session(self): # Returns a session using the default cookie key. return self.session_store.get_session() @webapp2.cached_property def messages(self): return self.session.get_flashes(key='_messages') def add_message(self, message, level=None): self.session.add_flash(message, level, key='_messages') @webapp2.cached_property def auth_config(self): """ Dict to hold urls for login/logout """ return { 'login_url': self.uri_for('login'), 'logout_url': self.uri_for('logout') } @webapp2.cached_property def language(self): return str(Locale.parse(self.locale).language) @webapp2.cached_property def user(self): return self.auth.get_user_by_session() @webapp2.cached_property def user_id(self): return str(self.user['user_id']) if self.user else None @webapp2.cached_property def user_key(self): if self.user: user_info = models.User.get_by_id(long(self.user_id)) return user_info.key return None @webapp2.cached_property def username(self): if self.user: try: user_info = models.User.get_by_id(long(self.user_id)) return str(user_info.username) except AttributeError, e: # avoid AttributeError when the session was delete from the server logging.error(e) self.auth.unset_session() self.redirect_to('home') return None @webapp2.cached_property def email(self): if self.user: try: user_info = models.User.get_by_id(long(self.user_id)) return user_info.email except AttributeError, e: # avoid AttributeError when the session was delete from the server logging.error(e) self.auth.unset_session() self.redirect_to('home') return None @webapp2.cached_property def provider_uris(self): login_urls = {} continue_url = self.request.get('continue_url') for provider in self.provider_info: if continue_url: login_url = self.uri_for("social-login", provider_name=provider, continue_url=continue_url) else: login_url = self.uri_for("social-login", provider_name=provider) login_urls[provider] = login_url return login_urls @webapp2.cached_property def provider_info(self): return models.SocialUser.PROVIDERS_INFO @webapp2.cached_property def path_for_language(self): """ Get the current path + query_string without language parameter (hl=something) Useful to put it on a template to concatenate with '&hl=NEW_LOCALE' Example: .../?hl=en_US """ path_lang = re.sub(r'(^hl=(\w{5})\&*)|(\&hl=(\w{5})\&*?)', '', 
str(self.request.query_string)) return self.request.path + "?" if path_lang == "" else str(self.request.path) + "?" + path_lang @property def locales(self): """ returns a dict of locale codes to locale display names in both the current locale and the localized locale example: if the current locale is es_ES then locales['en_US'] = 'Ingles (Estados Unidos) - English (United States)' """ if not self.app.config.get('locales'): return None locales = {} for l in self.app.config.get('locales'): current_locale = Locale.parse(self.locale) language = current_locale.languages[l.split('_')[0]] territory = current_locale.territories[l.split('_')[1]] localized_locale_name = Locale.parse(l).display_name.capitalize() locales[l] = language.capitalize() + " (" + territory.capitalize() + ") - " + localized_locale_name return locales @webapp2.cached_property def is_mobile(self): return utils.set_device_cookie_and_return_bool(self) @webapp2.cached_property def jinja2(self): return jinja2.get_jinja2(factory=jinja2_factory, app=self.app) @webapp2.cached_property def get_base_layout(self): """ Get the current base layout template for jinja2 templating. Uses the variable base_layout set in config or if there is a base_layout defined, use the base_layout. """ return self.base_layout if hasattr(self, 'base_layout') else self.app.config.get('base_layout') def set_base_layout(self, layout): """ Set the base_layout variable, thereby overwriting the default layout template name in config.py. """ self.base_layout = layout def render_template(self, filename, **kwargs): locales = self.app.config.get('locales') or [] locale_iso = None language = '' territory = '' language_id = self.app.config.get('app_lang') if self.locale and len(locales) > 1: locale_iso = Locale.parse(self.locale) language_id = locale_iso.language territory_id = locale_iso.territory language = locale_iso.languages[language_id] territory = locale_iso.territories[territory_id] # make all self.view variables available in jinja2 templates if hasattr(self, 'view'): kwargs.update(self.view.__dict__) # set or overwrite special vars for jinja templates kwargs.update({ 'google_analytics_domain' : self.app.config.get('google_analytics_domain'), 'google_analytics_code' : self.app.config.get('google_analytics_code'), 'app_name': self.app.config.get('app_name'), 'user_id': self.user_id, 'username': self.username, 'email': self.email, 'url': self.request.url, 'path': self.request.path, 'query_string': self.request.query_string, 'path_for_language': self.path_for_language, 'is_mobile': self.is_mobile, 'locale_iso': locale_iso, # babel locale object 'locale_language': language.capitalize() + " (" + territory.capitalize() + ")", # babel locale object 'locale_language_id': language_id, # babel locale object 'locales': self.locales, 'provider_uris': self.provider_uris, 'provider_info': self.provider_info, 'enable_federated_login': self.app.config.get('enable_federated_login'), 'base_layout': self.get_base_layout }) kwargs.update(self.auth_config) if hasattr(self, 'form'): kwargs['form'] = self.form if self.messages: kwargs['messages'] = self.messages self.response.headers.add_header('X-UA-Compatible', 'IE=Edge,chrome=1') self.response.write(self.jinja2.render_template(filename, **kwargs))
lgpl-3.0
-6,065,742,665,887,119,000
35.540845
138
0.587464
false
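A minimal, self-contained sketch of the login-check decorator pattern that user_required implements in the record above; DemoHandler, current_user and the /login URL are illustrative stand-ins for the webapp2 handler, the session-backed auth lookup and the configured login route, not the project's actual names.

import functools

def login_required(handler_method):
    """Redirect to a login URL when the handler has no authenticated user."""
    @functools.wraps(handler_method)
    def check_login(self, *args, **kwargs):
        if self.current_user is None:
            return self.redirect("/login")
        return handler_method(self, *args, **kwargs)
    return check_login

class DemoHandler:
    """Stand-in for a webapp2.RequestHandler exposing just what the sketch needs."""
    def __init__(self, user=None):
        self.current_user = user

    def redirect(self, url):
        return "302 -> " + url

    @login_required
    def get(self):
        return "200 OK"

print(DemoHandler().get())               # 302 -> /login
print(DemoHandler(user="alice").get())   # 200 OK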
CloverHealth/airflow
airflow/example_dags/example_passing_params_via_test_command.py
14
2351
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from datetime import timedelta import airflow from airflow import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator dag = DAG("example_passing_params_via_test_command", default_args={"owner": "airflow", "start_date": airflow.utils.dates.days_ago(1)}, schedule_interval='*/1 * * * *', dagrun_timeout=timedelta(minutes=4) ) def my_py_command(ds, **kwargs): # Print out the "foo" param passed in via # `airflow test example_passing_params_via_test_command run_this <date> # -tp '{"foo":"bar"}'` if kwargs["test_mode"]: print(" 'foo' was passed in via test={} command : kwargs[params][foo] \ = {}".format(kwargs["test_mode"], kwargs["params"]["foo"])) # Print out the value of "miff", passed in below via the Python Operator print(" 'miff' was passed in via task params = {}".format(kwargs["params"]["miff"])) return 1 my_templated_command = """ echo " 'foo was passed in via Airflow CLI Test command with value {{ params.foo }} " echo " 'miff was passed in via BashOperator with value {{ params.miff }} " """ run_this = PythonOperator( task_id='run_this', provide_context=True, python_callable=my_py_command, params={"miff": "agg"}, dag=dag) also_run_this = BashOperator( task_id='also_run_this', bash_command=my_templated_command, params={"miff": "agg"}, dag=dag) also_run_this.set_upstream(run_this)
apache-2.0
-8,027,758,272,358,939,000
35.169231
88
0.680987
false
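The templated bash command in the record above relies on Jinja rendering of the task's params; this is a minimal stand-alone sketch of that substitution, using the jinja2 package directly rather than Airflow's own templating context, with made-up parameter values.

from jinja2 import Template

templated = 'echo "foo={{ params.foo }} miff={{ params.miff }}"'
print(Template(templated).render(params={"foo": "bar", "miff": "agg"}))
# echo "foo=bar miff=agg"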
Luobiny/bioconda-recipes
recipes/mtnucratio/mtnucratio.py
20
2667
#!/usr/bin/env python # # Wrapper script for Java Conda packages that ensures that the java runtime # is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128). # # # Program Parameters # import os import sys import subprocess from os import access, getenv, X_OK jar_file = 'MTNucRatioCalculator-0.7.jar' default_jvm_mem_opts = ['-Xms512m', '-Xmx1g'] # !!! End of parameter section. No user-serviceable code below this line !!! def real_dirname(path): """Return the symlink-resolved, canonicalized directory-portion of path.""" return os.path.dirname(os.path.realpath(path)) def java_executable(): """Return the executable name of the Java interpreter.""" java_home = getenv('JAVA_HOME') java_bin = os.path.join('bin', 'java') if java_home and access(os.path.join(java_home, java_bin), X_OK): return os.path.join(java_home, java_bin) else: return 'java' def jvm_opts(argv): """Construct list of Java arguments based on our argument list. The argument list passed in argv must not include the script name. The return value is a 3-tuple lists of strings of the form: (memory_options, prop_options, passthrough_options) """ mem_opts = [] prop_opts = [] pass_args = [] for arg in argv: if arg.startswith('-D'): prop_opts.append(arg) elif arg.startswith('-XX'): prop_opts.append(arg) elif arg.startswith('-Xm'): mem_opts.append(arg) else: pass_args.append(arg) # In the original shell script the test coded below read: # if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ] # To reproduce the behaviour of the above shell code fragment # it is important to explictly check for equality with None # in the second condition, so a null envar value counts as True! if mem_opts == [] and getenv('_JAVA_OPTIONS') == None: mem_opts = default_jvm_mem_opts return (mem_opts, prop_opts, pass_args) def main(): java = java_executable() jar_dir = real_dirname(sys.argv[0]) (mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:]) if pass_args != [] and pass_args[0].startswith('eu'): jar_arg = '-cp' else: jar_arg = '-jar' jar_path = os.path.join(jar_dir, jar_file) java_args = [java]+ mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args if '--jar_dir' in sys.argv[1:]: print(jar_path) else: sys.exit(subprocess.call(java_args)) if __name__ == '__main__': main()
mit
-8,165,809,715,266,996,000
28.633333
175
0.633296
false
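A small, self-contained sketch of the argument-splitting idea implemented by jvm_opts() in the wrapper above: -Xm* flags become memory options, -D and -XX flags become JVM properties, and everything else is passed through to the application. The sample flags and default heap sizes below are illustrative only.

def split_jvm_args(argv, default_mem=("-Xms512m", "-Xmx1g")):
    mem, props, passthrough = [], [], []
    for arg in argv:
        if arg.startswith("-Xm"):
            mem.append(arg)
        elif arg.startswith(("-D", "-XX")):
            props.append(arg)
        else:
            passthrough.append(arg)
    # fall back to default heap sizes when the caller supplied none
    return (mem or list(default_mem)), props, passthrough

print(split_jvm_args(["-Xmx4g", "-Dsample.prop=1", "input.bam"]))
# (['-Xmx4g'], ['-Dsample.prop=1'], ['input.bam'])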
ahamilton55/ansible
lib/ansible/modules/network/cloudengine/ce_netconf.py
46
5922
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0'} DOCUMENTATION = ''' --- module: ce_netconf version_added: "2.4" short_description: Run an arbitrary netconf command on HUAWEI CloudEngine switches. description: - Sends an arbitrary netconf command on HUAWEI CloudEngine switches. author: - wangdezhuang (@CloudEngine-Ansible) options: rpc: description: - The type of rpc. required: true choices: ['get', 'edit-config', 'execute-action', 'execute-cli'] cfg_xml: description: - The config xml string. required: true ''' EXAMPLES = ''' - name: CloudEngine netconf test hosts: cloudengine connection: local gather_facts: no vars: cli: host: "{{ inventory_hostname }}" port: "{{ ansible_ssh_port }}" username: "{{ username }}" password: "{{ password }}" transport: cli tasks: - name: "Netconf get operation" ce_netconf: rpc: get cfg_xml: '<filter type=\"subtree\"> <vlan xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\"> <vlans> <vlan> <vlanId>10</vlanId> <vlanif> <ifName></ifName> <cfgBand></cfgBand> <dampTime></dampTime> </vlanif> </vlan> </vlans> </vlan> </filter>' provider: "{{ cli }}" - name: "Netconf edit-config operation" ce_netconf: rpc: edit-config cfg_xml: '<config> <aaa xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\"> <authenticationSchemes> <authenticationScheme operation=\"create\"> <authenSchemeName>default_wdz</authenSchemeName> <firstAuthenMode>local</firstAuthenMode> <secondAuthenMode>invalid</secondAuthenMode> </authenticationScheme> </authenticationSchemes> </aaa> </config>' provider: "{{ cli }}" - name: "Netconf execute-action operation" ce_netconf: rpc: execute-action cfg_xml: '<action> <l2mc xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\"> <l2McResetAllVlanStatis> <addrFamily>ipv4unicast</addrFamily> </l2McResetAllVlanStatis> </l2mc> </action>' provider: "{{ cli }}" ''' RETURN = ''' changed: description: check to see if a change was made on the device returned: always type: boolean sample: true end_state: description: k/v pairs of aaa params after module execution returned: always type: dict sample: {"result": ["ok"]} ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ce import get_nc_config, set_nc_config from ansible.module_utils.ce import execute_nc_action, ce_argument_spec, execute_nc_cli def main(): """ main """ argument_spec = dict( rpc=dict(choices=['get', 'edit-config', 'execute-action', 'execute-cli'], required=True), cfg_xml=dict(required=True) ) argument_spec.update(ce_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) rpc = module.params['rpc'] cfg_xml = module.params['cfg_xml'] changed = False end_state = dict() if rpc == "get": response = get_nc_config(module, 
cfg_xml) if "<data/>" in response: end_state["result"] = "<data/>" else: tmp1 = response.split(r"<data>") tmp2 = tmp1[1].split(r"</data>") result = tmp2[0].split("\n") end_state["result"] = result elif rpc == "edit-config": response = set_nc_config(module, cfg_xml) if "<ok/>" not in response: module.fail_json(msg='rpc edit-config failed.') changed = True end_state["result"] = "ok" elif rpc == "execute-action": response = execute_nc_action(module, cfg_xml) if "<ok/>" not in response: module.fail_json(msg='rpc execute-action failed.') changed = True end_state["result"] = "ok" elif rpc == "execute-cli": response = execute_nc_cli(module, cfg_xml) if "<data/>" in response: end_state["result"] = "<data/>" else: tmp1 = response.xml.split(r"<data>") tmp2 = tmp1[1].split(r"</data>") result = tmp2[0].split("\n") end_state["result"] = result else: module.fail_json(msg='please input correct rpc.') results = dict() results['changed'] = changed results['end_state'] = end_state module.exit_json(**results) if __name__ == '__main__': main()
gpl-3.0
-1,764,544,194,913,864,000
28.61
118
0.553698
false
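A small sketch of the reply handling in the module's "get" branch above: pull the payload out of a NETCONF <data> envelope and split it into lines. The sample reply string is made up for illustration.

def extract_data(reply):
    if "<data/>" in reply:
        return ["<data/>"]
    body = reply.split("<data>")[1].split("</data>")[0]
    return body.strip().split("\n")

sample = "<rpc-reply><data>\n<vlan><vlanId>10</vlanId></vlan>\n</data></rpc-reply>"
print(extract_data(sample))   # ['<vlan><vlanId>10</vlanId></vlan>']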
OpenBfS/dokpool-plone
Plone/src/elan.sitrep/elan/sitrep/content/srmoduleconfig.py
1
3765
# -*- coding: utf-8 -*- # # File: srmoduleconfig.py # # Copyright (c) 2017 by Condat AG # Generator: ConPD2 # http://www.condat.de # __author__ = '' __docformat__ = 'plaintext' """Definition of the SRModuleConfig content type. See srmoduleconfig.py for more explanation on the statements below. """ from AccessControl import ClassSecurityInfo from elan.sitrep import DocpoolMessageFactory as _ from plone.autoform import directives from plone.dexterity.content import Item from plone.dexterity.interfaces import IEditFinishedEvent from plone.supermodel import model from Products.CMFCore.utils import getToolByName from Products.CMFPlone.utils import log from z3c.relationfield.schema import RelationChoice from z3c.relationfield.schema import RelationList from zope import schema from zope.component import adapter from zope.interface import implementer class ISRModuleConfig(model.Schema): """ """ modType = schema.Choice( title=_(u'label_srmoduleconfig_modtype', default=u'Module Type'), description=_(u'description_srmoduleconfig_modtype', default=u''), required=True, source="elan.sitrep.vocabularies.ModuleTypes", ) docSelection = RelationChoice( title=_( u'label_srmoduleconfig_docselection', default=u'Collection for relevant documents', ), description=_( u'description_srmoduleconfig_docselection', default=u'This collection defines a pre-selection of possible documents to reference within this module.', ), required=False, source="elan.sitrep.vocabularies.Collections", ) textBlocks = RelationList( title=_(u'label_srmoduleconfig_textblocks', default=u'Text Blocks'), description=_(u'description_srmoduleconfig_textblocks', default=u''), required=False, value_type=RelationChoice( title=_("Text Blocks"), source="elan.sitrep.vocabularies.TextBlocks" ), ) defaultTextBlocks = RelationList( title=_( u'label_srmoduletype_defaulttextblocks', default=u'Default Text (when freshly created)', ), description=_( u'description_srmoduletype_defaulttextblocks', default=u''), required=False, value_type=RelationChoice( title=_("Default Text"), source="elan.sitrep.vocabularies.TextBlocks" ), ) directives.widget(docSelection='z3c.form.browser.select.SelectFieldWidget') directives.widget( textBlocks='z3c.form.browser.select.CollectionSelectFieldWidget') directives.widget( defaultTextBlocks='z3c.form.browser.select.CollectionSelectFieldWidget') @implementer(ISRModuleConfig) class SRModuleConfig(Item): """ """ security = ClassSecurityInfo() def getSRModuleNames(self): """ Index Method """ return [self.modType] def getSRModuleRefs(self): """ Index Method """ return [self.UID()] def currentDocuments(self): """ Return the documents from the referenced collection - if any. """ if self.docSelection: coll = self.docSelection.to_object return coll.results(batch=False) else: return [] def currentTextBlocks(self): """ """ return [tb.to_object for tb in (self.textBlocks or [])] @adapter(ISRModuleConfig, IEditFinishedEvent) def updated(obj, event=None): log("SRModuleConfig updated: %s" % str(obj)) sr_cat = getToolByName(obj, "sr_catalog") sr_cat._reindexObject(obj) if obj.textBlocks: for tb in obj.textBlocks: sr_cat._reindexObject(tb.to_object)
gpl-3.0
1,140,776,488,611,942,300
28.645669
118
0.656574
false
safwanrahman/mozillians
vendor-local/lib/python/unidecode/x0fa.py
252
4406
data = ( 'Chey ', # 0x00 'Thak ', # 0x01 'Thak ', # 0x02 'Thang ', # 0x03 'Thayk ', # 0x04 'Thong ', # 0x05 'Pho ', # 0x06 'Phok ', # 0x07 'Hang ', # 0x08 'Hang ', # 0x09 'Hyen ', # 0x0a 'Hwak ', # 0x0b 'Wu ', # 0x0c 'Huo ', # 0x0d '[?] ', # 0x0e '[?] ', # 0x0f 'Zhong ', # 0x10 '[?] ', # 0x11 'Qing ', # 0x12 '[?] ', # 0x13 '[?] ', # 0x14 'Xi ', # 0x15 'Zhu ', # 0x16 'Yi ', # 0x17 'Li ', # 0x18 'Shen ', # 0x19 'Xiang ', # 0x1a 'Fu ', # 0x1b 'Jing ', # 0x1c 'Jing ', # 0x1d 'Yu ', # 0x1e '[?] ', # 0x1f 'Hagi ', # 0x20 '[?] ', # 0x21 'Zhu ', # 0x22 '[?] ', # 0x23 '[?] ', # 0x24 'Yi ', # 0x25 'Du ', # 0x26 '[?] ', # 0x27 '[?] ', # 0x28 '[?] ', # 0x29 'Fan ', # 0x2a 'Si ', # 0x2b 'Guan ', # 0x2c '[?]', # 0x2d '[?]', # 0x2e '[?]', # 0x2f '[?]', # 0x30 '[?]', # 0x31 '[?]', # 0x32 '[?]', # 0x33 '[?]', # 0x34 '[?]', # 0x35 '[?]', # 0x36 '[?]', # 0x37 '[?]', # 0x38 '[?]', # 0x39 '[?]', # 0x3a '[?]', # 0x3b '[?]', # 0x3c '[?]', # 0x3d '[?]', # 0x3e '[?]', # 0x3f '[?]', # 0x40 '[?]', # 0x41 '[?]', # 0x42 '[?]', # 0x43 '[?]', # 0x44 '[?]', # 0x45 '[?]', # 0x46 '[?]', # 0x47 '[?]', # 0x48 '[?]', # 0x49 '[?]', # 0x4a '[?]', # 0x4b '[?]', # 0x4c '[?]', # 0x4d '[?]', # 0x4e '[?]', # 0x4f '[?]', # 0x50 '[?]', # 0x51 '[?]', # 0x52 '[?]', # 0x53 '[?]', # 0x54 '[?]', # 0x55 '[?]', # 0x56 '[?]', # 0x57 '[?]', # 0x58 '[?]', # 0x59 '[?]', # 0x5a '[?]', # 0x5b '[?]', # 0x5c '[?]', # 0x5d '[?]', # 0x5e '[?]', # 0x5f '[?]', # 0x60 '[?]', # 0x61 '[?]', # 0x62 '[?]', # 0x63 '[?]', # 0x64 '[?]', # 0x65 '[?]', # 0x66 '[?]', # 0x67 '[?]', # 0x68 '[?]', # 0x69 '[?]', # 0x6a '[?]', # 0x6b '[?]', # 0x6c '[?]', # 0x6d '[?]', # 0x6e '[?]', # 0x6f '[?]', # 0x70 '[?]', # 0x71 '[?]', # 0x72 '[?]', # 0x73 '[?]', # 0x74 '[?]', # 0x75 '[?]', # 0x76 '[?]', # 0x77 '[?]', # 0x78 '[?]', # 0x79 '[?]', # 0x7a '[?]', # 0x7b '[?]', # 0x7c '[?]', # 0x7d '[?]', # 0x7e '[?]', # 0x7f '[?]', # 0x80 '[?]', # 0x81 '[?]', # 0x82 '[?]', # 0x83 '[?]', # 0x84 '[?]', # 0x85 '[?]', # 0x86 '[?]', # 0x87 '[?]', # 0x88 '[?]', # 0x89 '[?]', # 0x8a '[?]', # 0x8b '[?]', # 0x8c '[?]', # 0x8d '[?]', # 0x8e '[?]', # 0x8f '[?]', # 0x90 '[?]', # 0x91 '[?]', # 0x92 '[?]', # 0x93 '[?]', # 0x94 '[?]', # 0x95 '[?]', # 0x96 '[?]', # 0x97 '[?]', # 0x98 '[?]', # 0x99 '[?]', # 0x9a '[?]', # 0x9b '[?]', # 0x9c '[?]', # 0x9d '[?]', # 0x9e '[?]', # 0x9f '[?]', # 0xa0 '[?]', # 0xa1 '[?]', # 0xa2 '[?]', # 0xa3 '[?]', # 0xa4 '[?]', # 0xa5 '[?]', # 0xa6 '[?]', # 0xa7 '[?]', # 0xa8 '[?]', # 0xa9 '[?]', # 0xaa '[?]', # 0xab '[?]', # 0xac '[?]', # 0xad '[?]', # 0xae '[?]', # 0xaf '[?]', # 0xb0 '[?]', # 0xb1 '[?]', # 0xb2 '[?]', # 0xb3 '[?]', # 0xb4 '[?]', # 0xb5 '[?]', # 0xb6 '[?]', # 0xb7 '[?]', # 0xb8 '[?]', # 0xb9 '[?]', # 0xba '[?]', # 0xbb '[?]', # 0xbc '[?]', # 0xbd '[?]', # 0xbe '[?]', # 0xbf '[?]', # 0xc0 '[?]', # 0xc1 '[?]', # 0xc2 '[?]', # 0xc3 '[?]', # 0xc4 '[?]', # 0xc5 '[?]', # 0xc6 '[?]', # 0xc7 '[?]', # 0xc8 '[?]', # 0xc9 '[?]', # 0xca '[?]', # 0xcb '[?]', # 0xcc '[?]', # 0xcd '[?]', # 0xce '[?]', # 0xcf '[?]', # 0xd0 '[?]', # 0xd1 '[?]', # 0xd2 '[?]', # 0xd3 '[?]', # 0xd4 '[?]', # 0xd5 '[?]', # 0xd6 '[?]', # 0xd7 '[?]', # 0xd8 '[?]', # 0xd9 '[?]', # 0xda '[?]', # 0xdb '[?]', # 0xdc '[?]', # 0xdd '[?]', # 0xde '[?]', # 0xdf '[?]', # 0xe0 '[?]', # 0xe1 '[?]', # 0xe2 '[?]', # 0xe3 '[?]', # 0xe4 '[?]', # 0xe5 '[?]', # 0xe6 '[?]', # 0xe7 '[?]', # 0xe8 '[?]', # 0xe9 '[?]', # 0xea '[?]', # 0xeb '[?]', # 0xec '[?]', # 0xed '[?]', # 0xee '[?]', # 0xef '[?]', # 0xf0 '[?]', # 0xf1 '[?]', # 0xf2 '[?]', # 0xf3 '[?]', # 0xf4 '[?]', # 0xf5 '[?]', # 0xf6 '[?]', # 0xf7 '[?]', # 0xf8 
'[?]', # 0xf9 '[?]', # 0xfa '[?]', # 0xfb '[?]', # 0xfc '[?]', # 0xfd '[?]', # 0xfe )
bsd-3-clause
-917,433,629,175,011,100
16.143969
19
0.258284
false
camptocamp/odoo
addons/crm_claim/__init__.py
390
1078
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import crm_claim import report # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
-5,220,181,429,007,546,000
40.461538
78
0.616883
false
ptisserand/docker-registry
tests/test_s3.py
30
6105
# -*- coding: utf-8 -*- import StringIO import sys import time from nose import tools from docker_registry.core import exceptions import docker_registry.testing as testing from docker_registry.testing import mock_boto # noqa from . import mock_s3 # noqa class StringIOWithError(StringIO.StringIO): '''Throw IOError after reaching EOF.''' def read(self, size): if self.pos == self.len: raise IOError('Reading beyond EOF') return StringIO.StringIO.read(self, size) class TestDriver(testing.Driver): '''Extra tests for coverage completion.''' def __init__(self): self.scheme = 's3' self.path = '' self.config = testing.Config({}) def tearDown(self): self._storage._boto_bucket.delete() super(TestDriver, self).tearDown() @tools.raises(exceptions.FileNotFoundError) def test_list_bucket(self): # Add a couple of bucket keys filename1 = self.gen_random_string() filename2 = self.gen_random_string() content = self.gen_random_string() self._storage.put_content(filename1, content) # Check bucket key is stored in normalized form self._storage.put_content(filename2 + '/', content) # Check both keys are in the bucket assert sorted([filename1, filename2]) == sorted( list(self._storage.list_directory())) # Check listing bucket raises exception after removing keys self._storage.remove(filename1) self._storage.remove(filename2) s = self._storage.list_directory() s.next() def test_stream_write(self): # Check stream write with buffer bigger than default 5MB self._storage.buffer_size = 7 * 1024 * 1024 filename = self.gen_random_string() # Test 8MB content = self.gen_random_string(8 * 1024 * 1024) io = StringIOWithError(content) assert not self._storage.exists(filename) try: self._storage.stream_write(filename, io) except IOError: pass assert self._storage.exists(filename) # Test that EOFed io string throws IOError on lib/storage/s3 try: self._storage.stream_write(filename, io) except IOError: pass # Cleanup io.close() self._storage.remove(filename) self._storage.buffer_size = 5 * 1024 * 1024 assert not self._storage.exists(filename) def test_init_path(self): # s3 storage _init_path result keys are relative (no / at start) root_path = self._storage._root_path if root_path.startswith('/'): self._storage._root_path = root_path[1:] assert not self._storage._init_path().startswith('/') self._storage._root_path = root_path def test_debug_key(self): # Create a valid s3 key object to debug filename = self.gen_random_string() content = self.gen_random_string() self._storage.put_content(filename, content) # Get filename key path as stored key_path = self._storage._init_path(filename) key = self._storage._boto_bucket.lookup(key_path) self._storage._debug_key(key) # Capture debugged output saved_stdout = sys.stdout output = StringIO.StringIO() sys.stdout = output # As key is mocked for unittest purposes, we call make_request directly dummy = "################\n('d', 1)\n{'v': 2}\n################\n" # '{}\n{}\n{}\n{}\n'.format( # '#' * 16, ('d', 1), {'v': 2}, '#' * 16) result = self._storage._boto_bucket.connection.make_request( 'd', 1, v=2) assert output.getvalue() == dummy assert result == 'request result' sys.stdout = saved_stdout # We don't call self._storage.remove(filename) here to ensure tearDown # cleanup properly and that other tests keep running as expected. 
# Validation test for docker-index#486 def test_get_tags(self): store = self._storage store._root_path = 'my/custom/path' store._init_path() assert store._root_path == 'my/custom/path' tag_path = store.tag_path('test', 'test', '0.0.2') store.put_content(tag_path, 'randomdata') tags_path = store.tag_path('test', 'test') for fname in store.list_directory(tags_path): full_tag_name = fname.split('/').pop() if not full_tag_name == 'tag_0.0.2': continue try: store.get_content(fname) except exceptions.FileNotFoundError: pass except Exception as e: raise e else: assert False tag_content = store.get_content(tag_path) assert tag_content == 'randomdata' def test_consistency_latency(self): self.testCount = -1 mockKey = mock_boto.Key() def mockExists(): self.testCount += 1 return self.testCount == 1 mockKey.exists = mockExists mockKey.get_contents_as_string = lambda: "Foo bar" self._storage.makeKey = lambda x: mockKey startTime = time.time() content = self._storage.get_content("/FOO") waitTime = time.time() - startTime assert waitTime >= 0.1, ("Waiting time was less than %sms " "(actual : %sms)" % (0.1 * 1000, waitTime * 1000)) assert content == "Foo bar", ("expected : %s; actual: %s" % ("Foo bar", content)) @tools.raises(exceptions.FileNotFoundError) def test_too_many_read_retries(self): self.testCount = -1 mockKey = mock_boto.Key() def mockExists(): self.testCount += 1 return self.testCount == 5 mockKey.exists = mockExists mockKey.get_contents_as_string = lambda: "Foo bar" self._storage.makeKey = lambda x: mockKey self._storage.get_content("/FOO")
apache-2.0
-4,708,033,643,386,103,000
33.885714
79
0.57887
false
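A self-contained sketch of the eventual-consistency retry behaviour that test_consistency_latency and test_too_many_read_retries above exercise: poll for a key a few times, sleeping between attempts, before giving up. The retry count, delay and toy backend are illustrative, not the driver's actual values.

import time

def read_with_retries(exists, read, retries=4, delay=0.1):
    for _ in range(retries):
        if exists():
            return read()
        time.sleep(delay)
    raise FileNotFoundError("key never became visible")

# toy backend that becomes visible on the second poll
state = {"calls": 0}
def exists():
    state["calls"] += 1
    return state["calls"] >= 2

print(read_with_retries(exists, lambda: "Foo bar"))   # Foo bar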
MoritzS/django
django/utils/archive.py
52
7462
""" Based on "python-archive" -- http://pypi.python.org/pypi/python-archive/ Copyright (c) 2010 Gary Wilson Jr. <[email protected]> and contributors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import os import shutil import stat import tarfile import zipfile class ArchiveException(Exception): """ Base exception class for all archive errors. """ class UnrecognizedArchiveFormat(ArchiveException): """ Error raised when passed file is not a recognized archive format. """ def extract(path, to_path=''): """ Unpack the tar or zip file at the specified path to the directory specified by to_path. """ with Archive(path) as archive: archive.extract(to_path) class Archive: """ The external API class that encapsulates an archive implementation. """ def __init__(self, file): self._archive = self._archive_cls(file)(file) @staticmethod def _archive_cls(file): cls = None if isinstance(file, str): filename = file else: try: filename = file.name except AttributeError: raise UnrecognizedArchiveFormat( "File object not a recognized archive format.") base, tail_ext = os.path.splitext(filename.lower()) cls = extension_map.get(tail_ext) if not cls: base, ext = os.path.splitext(base) cls = extension_map.get(ext) if not cls: raise UnrecognizedArchiveFormat( "Path not a recognized archive format: %s" % filename) return cls def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.close() def extract(self, to_path=''): self._archive.extract(to_path) def list(self): self._archive.list() def close(self): self._archive.close() class BaseArchive: """ Base Archive class. Implementations should inherit this class. """ @staticmethod def _copy_permissions(mode, filename): """ If the file in the archive has some permissions (this assumes a file won't be writable/executable without being readable), apply those permissions to the unarchived file. """ if mode & stat.S_IROTH: os.chmod(filename, mode) def split_leading_dir(self, path): path = str(path) path = path.lstrip('/').lstrip('\\') if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path): return path.split('/', 1) elif '\\' in path: return path.split('\\', 1) else: return path, '' def has_leading_dir(self, paths): """ Return True if all the paths have the same leading path name (i.e., everything is in one subdirectory in an archive). 
""" common_prefix = None for path in paths: prefix, rest = self.split_leading_dir(path) if not prefix: return False elif common_prefix is None: common_prefix = prefix elif prefix != common_prefix: return False return True def extract(self): raise NotImplementedError('subclasses of BaseArchive must provide an extract() method') def list(self): raise NotImplementedError('subclasses of BaseArchive must provide a list() method') class TarArchive(BaseArchive): def __init__(self, file): self._archive = tarfile.open(file) def list(self, *args, **kwargs): self._archive.list(*args, **kwargs) def extract(self, to_path): members = self._archive.getmembers() leading = self.has_leading_dir(x.name for x in members) for member in members: name = member.name if leading: name = self.split_leading_dir(name)[1] filename = os.path.join(to_path, name) if member.isdir(): if filename and not os.path.exists(filename): os.makedirs(filename) else: try: extracted = self._archive.extractfile(member) except (KeyError, AttributeError) as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) print("In the tar file %s the member %s is invalid: %s" % (name, member.name, exc)) else: dirname = os.path.dirname(filename) if dirname and not os.path.exists(dirname): os.makedirs(dirname) with open(filename, 'wb') as outfile: shutil.copyfileobj(extracted, outfile) self._copy_permissions(member.mode, filename) finally: if extracted: extracted.close() def close(self): self._archive.close() class ZipArchive(BaseArchive): def __init__(self, file): self._archive = zipfile.ZipFile(file) def list(self, *args, **kwargs): self._archive.printdir(*args, **kwargs) def extract(self, to_path): namelist = self._archive.namelist() leading = self.has_leading_dir(namelist) for name in namelist: data = self._archive.read(name) info = self._archive.getinfo(name) if leading: name = self.split_leading_dir(name)[1] filename = os.path.join(to_path, name) dirname = os.path.dirname(filename) if dirname and not os.path.exists(dirname): os.makedirs(dirname) if filename.endswith(('/', '\\')): # A directory if not os.path.exists(filename): os.makedirs(filename) else: with open(filename, 'wb') as outfile: outfile.write(data) # Convert ZipInfo.external_attr to mode mode = info.external_attr >> 16 self._copy_permissions(mode, filename) def close(self): self._archive.close() extension_map = { '.tar': TarArchive, '.tar.bz2': TarArchive, '.tar.gz': TarArchive, '.tgz': TarArchive, '.tz2': TarArchive, '.zip': ZipArchive, }
bsd-3-clause
-8,572,850,418,039,428,000
31.872247
101
0.588046
false
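A usage sketch for the archive module above (requires Django on the path): build a throwaway tarball and unpack it with extract(). Note that a common leading directory inside the archive is stripped, per has_leading_dir(). All paths here are temporary and illustrative.

import os
import tarfile
import tempfile

from django.utils.archive import extract

workdir = tempfile.mkdtemp()
src = os.path.join(workdir, "hello.txt")
with open(src, "w") as fh:
    fh.write("hi\n")

archive_path = os.path.join(workdir, "demo.tar.gz")
with tarfile.open(archive_path, "w:gz") as tar:
    tar.add(src, arcname="demo/hello.txt")   # single leading dir "demo"

out_dir = os.path.join(workdir, "out")
extract(archive_path, to_path=out_dir)
print(os.listdir(out_dir))   # ['hello.txt'] -- leading "demo/" was stripped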
buntyke/Flask
microblog/flask/lib/python2.7/site-packages/sqlalchemy/util/__init__.py
10
2520
# util/__init__.py # Copyright (C) 2005-2015 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .compat import callable, cmp, reduce, \ threading, py3k, py33, py36, py2k, jython, pypy, cpython, win32, \ pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \ raise_from_cause, text_type, safe_kwarg, string_types, int_types, \ binary_type, nested, \ quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\ unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\ iterbytes, StringIO, inspect_getargspec, zip_longest from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \ Properties, OrderedProperties, ImmutableProperties, OrderedDict, \ OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \ column_dict, ordered_column_set, populate_column_dict, unique_list, \ UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \ to_column_set, update_copy, flatten_iterator, has_intersection, \ LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \ coerce_generator_arg, lightweight_named_tuple from .langhelpers import iterate_attributes, class_hierarchy, \ portable_instancemethod, unbound_method_to_callable, \ getargspec_init, format_argspec_init, format_argspec_plus, \ get_func_kwargs, get_cls_kwargs, decorator, as_interface, \ memoized_property, memoized_instancemethod, md5_hex, \ group_expirable_memoized_property, dependencies, decode_slice, \ monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\ duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\ classproperty, set_creation_order, warn_exception, warn, NoneType,\ constructor_copy, methods_equivalent, chop_traceback, asint,\ generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \ safe_reraise,\ get_callable_argspec, only_once, attrsetter, ellipses_string, \ warn_limited, map_bits, MemoizedSlots, EnsureKWArgType from .deprecations import warn_deprecated, warn_pending_deprecation, \ deprecated, pending_deprecation, inject_docstring_text # things that used to be not always available, # but are now as of current support Python versions from collections import defaultdict from functools import partial from functools import update_wrapper from contextlib import contextmanager
mit
6,179,880,468,462,195,000
50.428571
80
0.754762
false
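A tiny usage sketch of one of the collection helpers re-exported above (requires SQLAlchemy): OrderedSet behaves like a set that remembers insertion order. The sample values are arbitrary.

from sqlalchemy.util import OrderedSet

s = OrderedSet(["b", "a", "b", "c"])
s.add("a")                 # already present, order unchanged
print(list(s))             # ['b', 'a', 'c']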
ojii/sandlib
lib/lib-python/2.7/importlib/__init__.py
456
1327
"""Backport of importlib.import_module from 3.x.""" # While not critical (and in no way guaranteed!), it would be nice to keep this # code compatible with Python 2.3. import sys def _resolve_name(name, package, level): """Return the absolute name of the module to be imported.""" if not hasattr(package, 'rindex'): raise ValueError("'package' not set to a string") dot = len(package) for x in xrange(level, 1, -1): try: dot = package.rindex('.', 0, dot) except ValueError: raise ValueError("attempted relative import beyond top-level " "package") return "%s.%s" % (package[:dot], name) def import_module(name, package=None): """Import a module. The 'package' argument is required when performing a relative import. It specifies the package to use as the anchor point from which to resolve the relative import to an absolute import. """ if name.startswith('.'): if not package: raise TypeError("relative imports require the 'package' argument") level = 0 for character in name: if character != '.': break level += 1 name = _resolve_name(name[level:], package, level) __import__(name) return sys.modules[name]
bsd-3-clause
8,607,384,532,238,540,000
33.921053
79
0.608892
false
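A usage sketch for import_module() as backported above, covering both an absolute and a relative import; the stdlib json package is used purely as a convenient example.

from importlib import import_module

json_mod = import_module("json")                      # absolute import
decoder = import_module(".decoder", package="json")   # relative to the json package
print(json_mod.__name__, decoder.__name__)            # json json.decoder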
nelsongoh/tembotsu
libs/future/backports/email/generator.py
82
19520
# Copyright (C) 2001-2010 Python Software Foundation # Author: Barry Warsaw # Contact: [email protected] """Classes to generate plain text from a message object tree.""" from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from future.builtins import super from future.builtins import str __all__ = ['Generator', 'DecodedGenerator', 'BytesGenerator'] import re import sys import time import random import warnings from io import StringIO, BytesIO from future.backports.email._policybase import compat32 from future.backports.email.header import Header from future.backports.email.utils import _has_surrogates import future.backports.email.charset as _charset UNDERSCORE = '_' NL = '\n' # XXX: no longer used by the code below. fcre = re.compile(r'^From ', re.MULTILINE) class Generator(object): """Generates output from a Message object tree. This basic generator writes the message to the given file object as plain text. """ # # Public interface # def __init__(self, outfp, mangle_from_=True, maxheaderlen=None, **_3to2kwargs): if 'policy' in _3to2kwargs: policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] else: policy = None """Create the generator for message flattening. outfp is the output file-like object for writing the message to. It must have a write() method. Optional mangle_from_ is a flag that, when True (the default), escapes From_ lines in the body of the message by putting a `>' in front of them. Optional maxheaderlen specifies the longest length for a non-continued header. When a header line is longer (in characters, with tabs expanded to 8 spaces) than maxheaderlen, the header will split as defined in the Header class. Set maxheaderlen to zero to disable header wrapping. The default is 78, as recommended (but not required) by RFC 2822. The policy keyword specifies a policy object that controls a number of aspects of the generator's operation. The default policy maintains backward compatibility. """ self._fp = outfp self._mangle_from_ = mangle_from_ self.maxheaderlen = maxheaderlen self.policy = policy def write(self, s): # Just delegate to the file object self._fp.write(s) def flatten(self, msg, unixfrom=False, linesep=None): r"""Print the message object tree rooted at msg to the output file specified when the Generator instance was created. unixfrom is a flag that forces the printing of a Unix From_ delimiter before the first object in the message tree. If the original message has no From_ delimiter, a `standard' one is crafted. By default, this is False to inhibit the printing of any From_ delimiter. Note that for subobjects, no From_ line is printed. linesep specifies the characters used to indicate a new line in the output. The default value is determined by the policy. """ # We use the _XXX constants for operating on data that comes directly # from the msg, and _encoded_XXX constants for operating on data that # has already been converted (to bytes in the BytesGenerator) and # inserted into a temporary buffer. 
policy = msg.policy if self.policy is None else self.policy if linesep is not None: policy = policy.clone(linesep=linesep) if self.maxheaderlen is not None: policy = policy.clone(max_line_length=self.maxheaderlen) self._NL = policy.linesep self._encoded_NL = self._encode(self._NL) self._EMPTY = '' self._encoded_EMTPY = self._encode('') # Because we use clone (below) when we recursively process message # subparts, and because clone uses the computed policy (not None), # submessages will automatically get set to the computed policy when # they are processed by this code. old_gen_policy = self.policy old_msg_policy = msg.policy try: self.policy = policy msg.policy = policy if unixfrom: ufrom = msg.get_unixfrom() if not ufrom: ufrom = 'From nobody ' + time.ctime(time.time()) self.write(ufrom + self._NL) self._write(msg) finally: self.policy = old_gen_policy msg.policy = old_msg_policy def clone(self, fp): """Clone this generator with the exact same options.""" return self.__class__(fp, self._mangle_from_, None, # Use policy setting, which we've adjusted policy=self.policy) # # Protected interface - undocumented ;/ # # Note that we use 'self.write' when what we are writing is coming from # the source, and self._fp.write when what we are writing is coming from a # buffer (because the Bytes subclass has already had a chance to transform # the data in its write method in that case). This is an entirely # pragmatic split determined by experiment; we could be more general by # always using write and having the Bytes subclass write method detect when # it has already transformed the input; but, since this whole thing is a # hack anyway this seems good enough. # Similarly, we have _XXX and _encoded_XXX attributes that are used on # source and buffer data, respectively. _encoded_EMPTY = '' def _new_buffer(self): # BytesGenerator overrides this to return BytesIO. return StringIO() def _encode(self, s): # BytesGenerator overrides this to encode strings to bytes. return s def _write_lines(self, lines): # We have to transform the line endings. if not lines: return lines = lines.splitlines(True) for line in lines[:-1]: self.write(line.rstrip('\r\n')) self.write(self._NL) laststripped = lines[-1].rstrip('\r\n') self.write(laststripped) if len(lines[-1]) != len(laststripped): self.write(self._NL) def _write(self, msg): # We can't write the headers yet because of the following scenario: # say a multipart message includes the boundary string somewhere in # its body. We'd have to calculate the new boundary /before/ we write # the headers so that we can write the correct Content-Type: # parameter. # # The way we do this, so as to make the _handle_*() methods simpler, # is to cache any subpart writes into a buffer. The we write the # headers and the buffer contents. That way, subpart handlers can # Do The Right Thing, and can still modify the Content-Type: header if # necessary. oldfp = self._fp try: self._fp = sfp = self._new_buffer() self._dispatch(msg) finally: self._fp = oldfp # Write the headers. First we see if the message object wants to # handle that itself. If not, we'll do it generically. meth = getattr(msg, '_write_headers', None) if meth is None: self._write_headers(msg) else: meth(self) self._fp.write(sfp.getvalue()) def _dispatch(self, msg): # Get the Content-Type: for the message, then try to dispatch to # self._handle_<maintype>_<subtype>(). If there's no handler for the # full MIME type, then dispatch to self._handle_<maintype>(). If # that's missing too, then dispatch to self._writeBody(). 
main = msg.get_content_maintype() sub = msg.get_content_subtype() specific = UNDERSCORE.join((main, sub)).replace('-', '_') meth = getattr(self, '_handle_' + specific, None) if meth is None: generic = main.replace('-', '_') meth = getattr(self, '_handle_' + generic, None) if meth is None: meth = self._writeBody meth(msg) # # Default handlers # def _write_headers(self, msg): for h, v in msg.raw_items(): self.write(self.policy.fold(h, v)) # A blank line always separates headers from body self.write(self._NL) # # Handlers for writing types and subtypes # def _handle_text(self, msg): payload = msg.get_payload() if payload is None: return if not isinstance(payload, str): raise TypeError('string payload expected: %s' % type(payload)) if _has_surrogates(msg._payload): charset = msg.get_param('charset') if charset is not None: del msg['content-transfer-encoding'] msg.set_payload(payload, charset) payload = msg.get_payload() if self._mangle_from_: payload = fcre.sub('>From ', payload) self._write_lines(payload) # Default body handler _writeBody = _handle_text def _handle_multipart(self, msg): # The trick here is to write out each part separately, merge them all # together, and then make sure that the boundary we've chosen isn't # present in the payload. msgtexts = [] subparts = msg.get_payload() if subparts is None: subparts = [] elif isinstance(subparts, str): # e.g. a non-strict parse of a message with no starting boundary. self.write(subparts) return elif not isinstance(subparts, list): # Scalar payload subparts = [subparts] for part in subparts: s = self._new_buffer() g = self.clone(s) g.flatten(part, unixfrom=False, linesep=self._NL) msgtexts.append(s.getvalue()) # BAW: What about boundaries that are wrapped in double-quotes? boundary = msg.get_boundary() if not boundary: # Create a boundary that doesn't appear in any of the # message texts. alltext = self._encoded_NL.join(msgtexts) boundary = self._make_boundary(alltext) msg.set_boundary(boundary) # If there's a preamble, write it out, with a trailing CRLF if msg.preamble is not None: if self._mangle_from_: preamble = fcre.sub('>From ', msg.preamble) else: preamble = msg.preamble self._write_lines(preamble) self.write(self._NL) # dash-boundary transport-padding CRLF self.write('--' + boundary + self._NL) # body-part if msgtexts: self._fp.write(msgtexts.pop(0)) # *encapsulation # --> delimiter transport-padding # --> CRLF body-part for body_part in msgtexts: # delimiter transport-padding CRLF self.write(self._NL + '--' + boundary + self._NL) # body-part self._fp.write(body_part) # close-delimiter transport-padding self.write(self._NL + '--' + boundary + '--') if msg.epilogue is not None: self.write(self._NL) if self._mangle_from_: epilogue = fcre.sub('>From ', msg.epilogue) else: epilogue = msg.epilogue self._write_lines(epilogue) def _handle_multipart_signed(self, msg): # The contents of signed parts has to stay unmodified in order to keep # the signature intact per RFC1847 2.1, so we disable header wrapping. # RDM: This isn't enough to completely preserve the part, but it helps. p = self.policy self.policy = p.clone(max_line_length=0) try: self._handle_multipart(msg) finally: self.policy = p def _handle_message_delivery_status(self, msg): # We can't just write the headers directly to self's file object # because this will leave an extra newline between the last header # block and the boundary. Sigh. 
blocks = [] for part in msg.get_payload(): s = self._new_buffer() g = self.clone(s) g.flatten(part, unixfrom=False, linesep=self._NL) text = s.getvalue() lines = text.split(self._encoded_NL) # Strip off the unnecessary trailing empty line if lines and lines[-1] == self._encoded_EMPTY: blocks.append(self._encoded_NL.join(lines[:-1])) else: blocks.append(text) # Now join all the blocks with an empty line. This has the lovely # effect of separating each block with an empty line, but not adding # an extra one after the last one. self._fp.write(self._encoded_NL.join(blocks)) def _handle_message(self, msg): s = self._new_buffer() g = self.clone(s) # The payload of a message/rfc822 part should be a multipart sequence # of length 1. The zeroth element of the list should be the Message # object for the subpart. Extract that object, stringify it, and # write it out. # Except, it turns out, when it's a string instead, which happens when # and only when HeaderParser is used on a message of mime type # message/rfc822. Such messages are generated by, for example, # Groupwise when forwarding unadorned messages. (Issue 7970.) So # in that case we just emit the string body. payload = msg._payload if isinstance(payload, list): g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL) payload = s.getvalue() else: payload = self._encode(payload) self._fp.write(payload) # This used to be a module level function; we use a classmethod for this # and _compile_re so we can continue to provide the module level function # for backward compatibility by doing # _make_boudary = Generator._make_boundary # at the end of the module. It *is* internal, so we could drop that... @classmethod def _make_boundary(cls, text=None): # Craft a random boundary. If text is given, ensure that the chosen # boundary doesn't appear in the text. token = random.randrange(sys.maxsize) boundary = ('=' * 15) + (_fmt % token) + '==' if text is None: return boundary b = boundary counter = 0 while True: cre = cls._compile_re('^--' + re.escape(b) + '(--)?$', re.MULTILINE) if not cre.search(text): break b = boundary + '.' + str(counter) counter += 1 return b @classmethod def _compile_re(cls, s, flags): return re.compile(s, flags) class BytesGenerator(Generator): """Generates a bytes version of a Message object tree. Functionally identical to the base Generator except that the output is bytes and not string. When surrogates were used in the input to encode bytes, these are decoded back to bytes for output. If the policy has cte_type set to 7bit, then the message is transformed such that the non-ASCII bytes are properly content transfer encoded, using the charset unknown-8bit. The outfp object must accept bytes in its write method. """ # Bytes versions of this constant for use in manipulating data from # the BytesIO buffer. _encoded_EMPTY = b'' def write(self, s): self._fp.write(str(s).encode('ascii', 'surrogateescape')) def _new_buffer(self): return BytesIO() def _encode(self, s): return s.encode('ascii') def _write_headers(self, msg): # This is almost the same as the string version, except for handling # strings with 8bit bytes. for h, v in msg.raw_items(): self._fp.write(self.policy.fold_binary(h, v)) # A blank line always separates headers from body self.write(self._NL) def _handle_text(self, msg): # If the string has surrogates the original source was bytes, so # just write it back out. 
if msg._payload is None: return if _has_surrogates(msg._payload) and not self.policy.cte_type=='7bit': if self._mangle_from_: msg._payload = fcre.sub(">From ", msg._payload) self._write_lines(msg._payload) else: super(BytesGenerator,self)._handle_text(msg) # Default body handler _writeBody = _handle_text @classmethod def _compile_re(cls, s, flags): return re.compile(s.encode('ascii'), flags) _FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]' class DecodedGenerator(Generator): """Generates a text representation of a message. Like the Generator base class, except that non-text parts are substituted with a format string representing the part. """ def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None): """Like Generator.__init__() except that an additional optional argument is allowed. Walks through all subparts of a message. If the subpart is of main type `text', then it prints the decoded payload of the subpart. Otherwise, fmt is a format string that is used instead of the message payload. fmt is expanded with the following keywords (in %(keyword)s format): type : Full MIME type of the non-text part maintype : Main MIME type of the non-text part subtype : Sub-MIME type of the non-text part filename : Filename of the non-text part description: Description associated with the non-text part encoding : Content transfer encoding of the non-text part The default value for fmt is None, meaning [Non-text (%(type)s) part of message omitted, filename %(filename)s] """ Generator.__init__(self, outfp, mangle_from_, maxheaderlen) if fmt is None: self._fmt = _FMT else: self._fmt = fmt def _dispatch(self, msg): for part in msg.walk(): maintype = part.get_content_maintype() if maintype == 'text': print(part.get_payload(decode=False), file=self) elif maintype == 'multipart': # Just skip this pass else: print(self._fmt % { 'type' : part.get_content_type(), 'maintype' : part.get_content_maintype(), 'subtype' : part.get_content_subtype(), 'filename' : part.get_filename('[no filename]'), 'description': part.get('Content-Description', '[no description]'), 'encoding' : part.get('Content-Transfer-Encoding', '[no encoding]'), }, file=self) # Helper used by Generator._make_boundary _width = len(repr(sys.maxsize-1)) _fmt = '%%0%dd' % _width # Backward compatibility _make_boundary = Generator._make_boundary
apache-2.0
250,567,028,001,611,070
38.196787
93
0.602357
false
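A short usage sketch of the Generator API defined above, written against the stdlib email package (which the future.backports module mirrors): flatten a simple message into a string buffer. Addresses and subject are placeholders.

from email.generator import Generator
from email.mime.text import MIMEText
from io import StringIO

msg = MIMEText("Hello from a flattened message.\n")
msg["Subject"] = "demo"
msg["From"] = "[email protected]"
msg["To"] = "[email protected]"

buf = StringIO()
Generator(buf, mangle_from_=False, maxheaderlen=78).flatten(msg)
print(buf.getvalue())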
fabaff/ansible
lib/ansible/executor/playbook_executor.py
1
9866
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import getpass import locale import os import signal import sys from ansible.compat.six import string_types from ansible.executor.task_queue_manager import TaskQueueManager from ansible.playbook import Playbook from ansible.template import Templar from ansible.utils.unicode import to_unicode try: from __main__ import display except ImportError: from ansible.utils.display import Display display = Display() class PlaybookExecutor: ''' This is the primary class for executing playbooks, and thus the basis for bin/ansible-playbook operation. ''' def __init__(self, playbooks, inventory, variable_manager, loader, options, passwords): self._playbooks = playbooks self._inventory = inventory self._variable_manager = variable_manager self._loader = loader self._options = options self.passwords = passwords self._unreachable_hosts = dict() if options.listhosts or options.listtasks or options.listtags or options.syntax: self._tqm = None else: self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=self.passwords) def run(self): ''' Run the given playbook, based on the settings in the play which may limit the runs to serialized groups, etc. 
''' signal.signal(signal.SIGINT, self._cleanup) result = 0 entrylist = [] entry = {} try: for playbook_path in self._playbooks: pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader) self._inventory.set_playbook_basedir(os.path.dirname(playbook_path)) if self._tqm is None: # we are doing a listing entry = {'playbook': playbook_path} entry['plays'] = [] else: # make sure the tqm has callbacks loaded self._tqm.load_callbacks() self._tqm.send_callback('v2_playbook_on_start', pb) i = 1 plays = pb.get_plays() display.vv('%d plays in %s' % (len(plays), playbook_path)) for play in plays: if play._included_path is not None: self._loader.set_basedir(play._included_path) else: self._loader.set_basedir(pb._basedir) # clear any filters which may have been applied to the inventory self._inventory.remove_restriction() if play.vars_prompt: for var in play.vars_prompt: vname = var['name'] prompt = var.get("prompt", vname) default = var.get("default", None) private = var.get("private", True) confirm = var.get("confirm", False) encrypt = var.get("encrypt", None) salt_size = var.get("salt_size", None) salt = var.get("salt", None) if vname not in self._variable_manager.extra_vars: if self._tqm: self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default) play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default) else: # we are either in --list-<option> or syntax check play.vars[vname] = default # Create a temporary copy of the play here, so we can run post_validate # on it without the templating changes affecting the original object. all_vars = self._variable_manager.get_vars(loader=self._loader, play=play) templar = Templar(loader=self._loader, variables=all_vars) new_play = play.copy() new_play.post_validate(templar) if self._options.syntax: continue if self._tqm is None: # we are just doing a listing entry['plays'].append(new_play) else: self._tqm._unreachable_hosts.update(self._unreachable_hosts) # we are actually running plays for batch in self._get_serialized_batches(new_play): if len(batch) == 0: self._tqm.send_callback('v2_playbook_on_play_start', new_play) self._tqm.send_callback('v2_playbook_on_no_hosts_matched') break # restrict the inventory to the hosts in the serialized batch self._inventory.restrict_to_hosts(batch) # and run it... result = self._tqm.run(play=play) # check the number of failures here, to see if they're above the maximum # failure percentage allowed, or if any errors are fatal. 
If either of those # conditions are met, we break out, otherwise we only break out if the entire # batch failed failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) if new_play.any_errors_fatal and failed_hosts_count > 0: break elif new_play.max_fail_percentage is not None and \ int((new_play.max_fail_percentage)/100.0 * len(batch)) > int((len(batch) - failed_hosts_count) / len(batch) * 100.0): break elif len(batch) == failed_hosts_count: break # clear the failed hosts dictionaires in the TQM for the next batch self._unreachable_hosts.update(self._tqm._unreachable_hosts) self._tqm.clear_failed_hosts() # if the last result wasn't zero or 3 (some hosts were unreachable), # break out of the serial batch loop if result not in (0, 3): break i = i + 1 # per play if entry: entrylist.append(entry) # per playbook # send the stats callback for this playbook if self._tqm is not None: self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats) # if the last result wasn't zero, break out of the playbook file name loop if result != 0: break if entrylist: return entrylist finally: if self._tqm is not None: self._cleanup() if self._options.syntax: display.display("No issues encountered") return result return result def _cleanup(self, signum=None, framenum=None): return self._tqm.cleanup() def _get_serialized_batches(self, play): ''' Returns a list of hosts, subdivided into batches based on the serial size specified in the play. ''' # make sure we have a unique list of hosts all_hosts = self._inventory.get_hosts(play.hosts) # check to see if the serial number was specified as a percentage, # and convert it to an integer value based on the number of hosts if isinstance(play.serial, string_types) and play.serial.endswith('%'): serial_pct = int(play.serial.replace("%","")) serial = int((serial_pct/100.0) * len(all_hosts)) else: if play.serial is None: serial = -1 else: serial = int(play.serial) # if the serial count was not specified or is invalid, default to # a list of all hosts, otherwise split the list of hosts into chunks # which are based on the serial size if serial <= 0: return [all_hosts] else: serialized_batches = [] while len(all_hosts) > 0: play_hosts = [] for x in range(serial): if len(all_hosts) > 0: play_hosts.append(all_hosts.pop(0)) serialized_batches.append(play_hosts) return serialized_batches
gpl-3.0
-1,444,482,905,630,737,400
40.628692
157
0.534259
false
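The _get_serialized_batches helper in the record above turns a play's serial setting (an integer or a percentage string) into host batches. The standalone sketch below, with invented host names, shows the same splitting rule; it is an illustration, not Ansible's API:

# Simplified, self-contained version of the batching logic.
def serialized_batches(all_hosts, serial=None):
    # A percentage like '40%' is converted to a host count first.
    if isinstance(serial, str) and serial.endswith('%'):
        serial = int(int(serial.rstrip('%')) / 100.0 * len(all_hosts))
    serial = -1 if serial is None else int(serial)
    # Unset or invalid serial means "everything in one batch".
    if serial <= 0:
        return [list(all_hosts)]
    hosts = list(all_hosts)
    return [hosts[i:i + serial] for i in range(0, len(hosts), serial)]

print(serialized_batches(['h1', 'h2', 'h3', 'h4', 'h5'], '40%'))
# [['h1', 'h2'], ['h3', 'h4'], ['h5']]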
savi-dev/horizon
horizon/dashboards/nova/containers/forms.py
1
7384
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from django import shortcuts from django.contrib import messages from django.core import validators from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import api from horizon import exceptions from horizon import forms LOG = logging.getLogger(__name__) no_slash_validator = validators.RegexValidator(r'^(?u)[^/]+$', _("Slash is not an allowed " "character."), code="noslash") class CreateContainer(forms.SelfHandlingForm): parent = forms.CharField(max_length=255, required=False, widget=forms.HiddenInput) name = forms.CharField(max_length=255, label=_("Container Name"), validators=[no_slash_validator]) def handle(self, request, data): try: if not data['parent']: # Create a container api.swift_create_container(request, data["name"]) messages.success(request, _("Container created successfully.")) else: # Create a pseudo-folder container, slash, remainder = data['parent'].partition("/") remainder = remainder.rstrip("/") subfolder_name = "/".join([bit for bit in (remainder, data['name']) if bit]) api.swift_create_subfolder(request, container, subfolder_name) messages.success(request, _("Folder created successfully.")) url = "horizon:nova:containers:object_index" if remainder: remainder = remainder.rstrip("/") remainder += "/" return shortcuts.redirect(url, container, remainder) except: exceptions.handle(request, _('Unable to create container.')) return shortcuts.redirect("horizon:nova:containers:index") class UploadObject(forms.SelfHandlingForm): path = forms.CharField(max_length=255, required=False, widget=forms.HiddenInput) name = forms.CharField(max_length=255, label=_("Object Name"), validators=[no_slash_validator]) object_file = forms.FileField(label=_("File")) container_name = forms.CharField(widget=forms.HiddenInput()) def handle(self, request, data): object_file = self.files['object_file'] if data['path']: object_path = "/".join([data['path'].rstrip("/"), data['name']]) else: object_path = data['name'] try: obj = api.swift_upload_object(request, data['container_name'], object_path, object_file) obj.metadata['orig-filename'] = object_file.name obj.sync_metadata() messages.success(request, _("Object was successfully uploaded.")) except: exceptions.handle(request, _("Unable to upload object.")) return shortcuts.redirect("horizon:nova:containers:object_index", data['container_name'], data['path']) class CopyObject(forms.SelfHandlingForm): new_container_name = forms.ChoiceField(label=_("Destination container"), validators=[no_slash_validator]) path = forms.CharField(max_length=255, required=False) new_object_name = forms.CharField(max_length=255, label=_("Destination object name"), validators=[no_slash_validator]) orig_container_name = 
forms.CharField(widget=forms.HiddenInput()) orig_object_name = forms.CharField(widget=forms.HiddenInput()) def __init__(self, *args, **kwargs): containers = kwargs.pop('containers') super(CopyObject, self).__init__(*args, **kwargs) self.fields['new_container_name'].choices = containers def handle(self, request, data): object_index = "horizon:nova:containers:object_index" orig_container = data['orig_container_name'] orig_object = data['orig_object_name'] new_container = data['new_container_name'] new_object = data['new_object_name'] new_path = "%s%s" % (data['path'], new_object) # Iteratively make sure all the directory markers exist. if data['path']: path_component = "" for bit in data['path'].split("/"): path_component += bit try: api.swift.swift_create_subfolder(request, new_container, path_component) except: redirect = reverse(object_index, args=(orig_container,)) exceptions.handle(request, _("Unable to copy object."), redirect=redirect) path_component += "/" # Now copy the object itself. try: api.swift_copy_object(request, orig_container, orig_object, new_container, new_path) dest = "%s/%s" % (new_container, data['path']) vals = {"dest": dest.rstrip("/"), "orig": orig_object.split("/")[-1], "new": new_object} messages.success(request, _('Copied "%(orig)s" to "%(dest)s" as "%(new)s".') % vals) except exceptions.HorizonException, exc: messages.error(request, exc) return shortcuts.redirect(object_index, orig_container) except: redirect = reverse(object_index, args=(orig_container,)) exceptions.handle(request, _("Unable to copy object."), redirect=redirect) return shortcuts.redirect(object_index, new_container, data['path'])
apache-2.0
7,625,742,781,935,055,000
42.181287
79
0.527221
false
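In CreateContainer.handle() above, a pseudo-folder is created by splitting the hidden parent field into a container name and a nested path. A small standalone illustration of that string handling (names invented, no Horizon or Swift calls involved):

# parent as posted by the form: "<container>/<existing pseudo-folders>/"
parent, name = "media/photos/2012/", "vacation"

container, slash, remainder = parent.partition("/")
remainder = remainder.rstrip("/")
subfolder_name = "/".join([bit for bit in (remainder, name) if bit])

print(container)       # media
print(subfolder_name)  # photos/2012/vacation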
mahak/keystone
keystone/auth/plugins/base.py
2
3476
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import abc
import collections

from keystone.common import provider_api
from keystone import exception


AuthHandlerResponse = collections.namedtuple(
    'AuthHandlerResponse', 'status, response_body, response_data')


class AuthMethodHandler(provider_api.ProviderAPIMixin, object,
                        metaclass=abc.ABCMeta):
    """Abstract base class for an authentication plugin."""

    def __init__(self):
        pass

    @abc.abstractmethod
    def authenticate(self, auth_payload):
        """Authenticate user and return an authentication context.

        :param auth_payload: the payload content of the authentication
                             request for a given method
        :type auth_payload: dict

        If successful, plugin must set ``user_id`` in ``response_data``.
        ``method_name`` is used to convey any additional authentication
        methods in case authentication is for re-scoping. For example, if the
        authentication is for re-scoping, plugin must append the previous
        method names into ``method_names``; NOTE: This behavior is exclusive
        to the re-scope type action. Here's an example of ``response_data`` on
        successful authentication::

            {
                "methods": [
                    "password",
                    "token"
                ],
                "user_id": "abc123"
            }

        Plugins are invoked in the order in which they are specified in the
        ``methods`` attribute of the ``identity`` object. For example,
        ``custom-plugin`` is invoked before ``password``, which is invoked
        before ``token`` in the following authentication request::

            {
                "auth": {
                    "identity": {
                        "custom-plugin": {
                            "custom-data": "sdfdfsfsfsdfsf"
                        },
                        "methods": [
                            "custom-plugin",
                            "password",
                            "token"
                        ],
                        "password": {
                            "user": {
                                "id": "s23sfad1",
                                "password": "secret"
                            }
                        },
                        "token": {
                            "id": "sdfafasdfsfasfasdfds"
                        }
                    }
                }
            }

        :returns: AuthHandlerResponse with status set to ``True`` if auth was
                  successful. If `status` is ``False`` and this is a multi-step
                  auth, the ``response_body`` can be in a form of a dict for
                  the next step in authentication.

        :raises keystone.exception.Unauthorized: for authentication failure
        """
        raise exception.Unauthorized()
apache-2.0
4,375,000,550,260,059,600
36.376344
79
0.546893
false
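The docstring above spells out the plugin contract. A hypothetical subclass sketch (the plugin name, token value, and user id are invented, and it assumes the module above is importable as keystone.auth.plugins.base) shows the expected success and failure paths:

from keystone.auth.plugins import base
from keystone import exception


class DummyToken(base.AuthMethodHandler):
    """Illustrative only: accepts a single hard-coded token id."""

    def authenticate(self, auth_payload):
        if auth_payload.get('id') == 'expected-token':
            # Success: user_id must be present in response_data.
            return base.AuthHandlerResponse(
                status=True,
                response_body=None,
                response_data={'user_id': 'abc123', 'methods': ['dummy']})
        # Anything else is an authentication failure.
        raise exception.Unauthorized()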
GaussDing/django
tests/gis_tests/geoapp/test_feeds.py
33
4256
from __future__ import unicode_literals

from xml.dom import minidom

from django.conf import settings
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.sites.models import Site
from django.test import (
    TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)

if HAS_GEOS:
    from .models import City


@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
@skipUnlessDBFeature("gis_enabled")
class GeoFeedTest(TestCase):
    fixtures = ['initial']

    def setUp(self):
        Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()

    def assertChildNodes(self, elem, expected):
        "Taken from syndication/tests.py."
        actual = set(n.nodeName for n in elem.childNodes)
        expected = set(expected)
        self.assertEqual(actual, expected)

    def test_geofeed_rss(self):
        "Tests geographic feeds using GeoRSS over RSSv2."
        # Uses `GEOSGeometry` in `item_geometry`
        doc1 = minidom.parseString(self.client.get('/feeds/rss1/').content)
        # Uses a 2-tuple in `item_geometry`
        doc2 = minidom.parseString(self.client.get('/feeds/rss2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild

        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2.getElementsByTagName('channel')[0],
                              ['title', 'link', 'description', 'language',
                               'lastBuildDate', 'item', 'georss:box', 'atom:link']
                              )

        # Incrementing through the feeds.
        for feed in [feed1, feed2]:
            # Ensuring the georss namespace was added to the <rss> element.
            self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
            chan = feed.getElementsByTagName('channel')[0]
            items = chan.getElementsByTagName('item')
            self.assertEqual(len(items), City.objects.count())

            # Ensuring the georss element was added to each item in the feed.
            for item in items:
                self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'georss:point'])

    def test_geofeed_atom(self):
        "Testing geographic feeds using GeoRSS over Atom."
        doc1 = minidom.parseString(self.client.get('/feeds/atom1/').content)
        doc2 = minidom.parseString(self.client.get('/feeds/atom2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild

        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2, ['title', 'link', 'id', 'updated', 'entry', 'georss:box'])

        for feed in [feed1, feed2]:
            # Ensuring the georss namespace was added to the <feed> element.
            self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
            entries = feed.getElementsByTagName('entry')
            self.assertEqual(len(entries), City.objects.count())

            # Ensuring the georss element was added to each entry in the feed.
            for entry in entries:
                self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'georss:point'])

    def test_geofeed_w3c(self):
        "Testing geographic feeds using W3C Geo."
        doc = minidom.parseString(self.client.get('/feeds/w3cgeo1/').content)
        feed = doc.firstChild
        # Ensuring the geo namespace was added to the <feed> element.
        self.assertEqual(feed.getAttribute('xmlns:geo'), 'http://www.w3.org/2003/01/geo/wgs84_pos#')
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        self.assertEqual(len(items), City.objects.count())

        # Ensuring the geo:lat and geo:lon element was added to each item in the feed.
        for item in items:
            self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'geo:lat', 'geo:lon'])

        # Boxes and Polygons aren't allowed in W3C Geo feeds.
        self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo2/')  # Box in <channel>
        self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo3/')  # Polygons in <entry>
bsd-3-clause
8,169,172,913,197,229,000
45.26087
103
0.643797
false
SkillSmart/ConferenceManagementSystem
ApplicationManagement/migrations/0012_auto_20170331_1943.py
1
1448
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-31 17:43
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ApplicationManagement', '0011_auto_20170331_1649'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='feedback',
            name='category_1',
        ),
        migrations.RemoveField(
            model_name='feedback',
            name='category_2',
        ),
        migrations.RemoveField(
            model_name='feedback',
            name='category_3',
        ),
        migrations.RemoveField(
            model_name='feedback',
            name='category_4',
        ),
        migrations.AddField(
            model_name='feedback',
            name='feedbackComm',
            field=models.IntegerField(blank=True, null=True, verbose_name='How well was the Feedback Communicated?'),
        ),
        migrations.AddField(
            model_name='feedback',
            name='feedbackQuality',
            field=models.IntegerField(blank=True, null=True, verbose_name='How helpful was his/her Feedback?'),
        ),
        migrations.AddField(
            model_name='feedback',
            name='feedbackRelated',
            field=models.IntegerField(blank=True, null=True, verbose_name='How well did the Feedback relate to the Assessment Criteria?'),
        ),
    ]
mit
1,433,048,053,057,716,500
30.478261
138
0.575276
false
michaelyin/code-for-blog
2009/plotting_data_monitor/com_monitor.py
15
2988
import Queue
import threading
import time

import serial


class ComMonitorThread(threading.Thread):
    """ A thread for monitoring a COM port. The COM port is
        opened when the thread is started.

        data_q:
            Queue for received data. Items in the queue are
            (data, timestamp) pairs, where data is a binary
            string representing the received data, and timestamp
            is the time elapsed from the thread's start (in
            seconds).

        error_q:
            Queue for error messages. In particular, if the
            serial port fails to open for some reason, an error
            is placed into this queue.

        port:
            The COM port to open. Must be recognized by the
            system.

        port_baud/stopbits/parity:
            Serial communication parameters

        port_timeout:
            The timeout used for reading the COM port. If this
            value is low, the thread will return data in finer
            grained chunks, with more accurate timestamps, but
            it will also consume more CPU.
    """
    def __init__(self,
                 data_q, error_q,
                 port_num,
                 port_baud,
                 port_stopbits=serial.STOPBITS_ONE,
                 port_parity=serial.PARITY_NONE,
                 port_timeout=0.01):
        threading.Thread.__init__(self)

        self.serial_port = None
        self.serial_arg = dict(port=port_num,
                               baudrate=port_baud,
                               stopbits=port_stopbits,
                               parity=port_parity,
                               timeout=port_timeout)

        self.data_q = data_q
        self.error_q = error_q

        self.alive = threading.Event()
        self.alive.set()

    def run(self):
        try:
            if self.serial_port:
                self.serial_port.close()
            self.serial_port = serial.Serial(**self.serial_arg)
        except serial.SerialException, e:
            self.error_q.put(e.message)
            return

        # Restart the clock
        time.clock()

        while self.alive.isSet():
            # Reading 1 byte, followed by whatever is left in the
            # read buffer, as suggested by the developer of
            # PySerial.
            #
            data = self.serial_port.read(1)
            data += self.serial_port.read(self.serial_port.inWaiting())

            if len(data) > 0:
                timestamp = time.clock()
                self.data_q.put((data, timestamp))

        # clean up
        if self.serial_port:
            self.serial_port.close()

    def join(self, timeout=None):
        self.alive.clear()
        threading.Thread.join(self, timeout)
unlicense
8,809,533,817,093,793,000
31.255556
71
0.498996
false
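A usage sketch for the ComMonitorThread above (Python 2, matching the module; the port name and baud rate are placeholders for whatever device is actually attached) illustrating the queue-based consumption pattern described in the docstring:

import Queue

data_q = Queue.Queue()
error_q = Queue.Queue()

monitor = ComMonitorThread(data_q, error_q, port_num='COM3', port_baud=38400)
monitor.start()

# Consume the (data, timestamp) pairs produced by the thread.
try:
    data, timestamp = data_q.get(timeout=1.0)
    print 'got %d bytes at t=%.3fs' % (len(data), timestamp)
except Queue.Empty:
    print 'no data received yet'
finally:
    monitor.join(0.01)   # clears the alive flag and waits briefly for shutdown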
systemdaemon/systemd
tools/gdb-sd_dump_hashmaps.py
112
5114
# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
#
# This file is part of systemd.
#
# Copyright 2014 Michal Schmidt
#
# systemd is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# systemd is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with systemd; If not, see <http://www.gnu.org/licenses/>.

import gdb

class sd_dump_hashmaps(gdb.Command):
    "dump systemd's hashmaps"

    def __init__(self):
        super(sd_dump_hashmaps, self).__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)

    def invoke(self, arg, from_tty):
        d = gdb.parse_and_eval("hashmap_debug_list")
        all_entry_sizes = gdb.parse_and_eval("all_entry_sizes")
        all_direct_buckets = gdb.parse_and_eval("all_direct_buckets")
        hashmap_base_t = gdb.lookup_type("HashmapBase")
        uchar_t = gdb.lookup_type("unsigned char")
        ulong_t = gdb.lookup_type("unsigned long")
        debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")

        print "type, hash, indirect, entries, max_entries, buckets, creator"
        while d:
            h = gdb.parse_and_eval("(HashmapBase*)((char*)%d - %d)" % (int(d.cast(ulong_t)), debug_offset))

            if h["has_indirect"]:
                storage_ptr = h["indirect"]["storage"].cast(uchar_t.pointer())
                n_entries = h["indirect"]["n_entries"]
                n_buckets = h["indirect"]["n_buckets"]
            else:
                storage_ptr = h["direct"]["storage"].cast(uchar_t.pointer())
                n_entries = h["n_direct_entries"]
                n_buckets = all_direct_buckets[int(h["type"])];

            t = ["plain", "ordered", "set"][int(h["type"])]

            print "%s, %s, %s, %d, %d, %d, %s (%s:%d)" % (t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"])

            if arg != "" and n_entries > 0:
                dib_raw_addr = storage_ptr + (all_entry_sizes[h["type"]] * n_buckets)

                histogram = {}
                for i in xrange(0, n_buckets):
                    dib = int(dib_raw_addr[i])
                    histogram[dib] = histogram.get(dib, 0) + 1

                for dib in sorted(iter(histogram)):
                    if dib != 255:
                        print "%3d %8d %f%% of entries" % (dib, histogram[dib], 100.0*histogram[dib]/n_entries)
                    else:
                        print "%3d %8d %f%% of slots" % (dib, histogram[dib], 100.0*histogram[dib]/n_buckets)
                print "mean DIB of entries: %f" % (sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries)

                blocks = []
                current_len = 1
                prev = int(dib_raw_addr[0])
                for i in xrange(1, n_buckets):
                    dib = int(dib_raw_addr[i])
                    if (dib == 255) != (prev == 255):
                        if prev != 255:
                            blocks += [[i, current_len]]
                        current_len = 1
                    else:
                        current_len += 1

                    prev = dib
                if prev != 255:
                    blocks += [[i, current_len]]
                # a block may be wrapped around
                if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
                    blocks[0][1] += blocks[-1][1]
                    blocks = blocks[0:-1]
                print "max block: %s" % max(blocks, key=lambda a: a[1])
                print "sum block lens: %d" % sum(b[1] for b in blocks)
                print "mean block len: %f" % (1.0 * sum(b[1] for b in blocks) / len(blocks))

            d = d["debug_list_next"]

sd_dump_hashmaps()
gpl-2.0
-2,819,548,992,402,399,000
53.404255
185
0.437231
false
lukeburden/django-allauth
allauth/socialaccount/providers/bitbucket_oauth2/provider.py
10
1122
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider


class BitbucketOAuth2Account(ProviderAccount):

    def get_profile_url(self):
        return (self.account.extra_data
                .get('links', {})
                .get('html', {})
                .get('href'))

    def get_avatar_url(self):
        return (self.account.extra_data
                .get('links', {})
                .get('avatar', {})
                .get('href'))

    def to_str(self):
        dflt = super(BitbucketOAuth2Account, self).to_str()
        return self.account.extra_data.get('display_name', dflt)


class BitbucketOAuth2Provider(OAuth2Provider):
    id = 'bitbucket_oauth2'
    name = 'Bitbucket'
    account_class = BitbucketOAuth2Account

    def extract_uid(self, data):
        return data['username']

    def extract_common_fields(self, data):
        return dict(email=data.get('email'),
                    username=data.get('username'),
                    name=data.get('display_name'))


provider_classes = [BitbucketOAuth2Provider]
mit
3,572,753,055,431,844,400
29.324324
74
0.610517
false
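The accessors in the provider above imply the shape of the Bitbucket user payload. A quick illustration with invented values (plain dict handling only, no allauth calls) of what extract_uid and extract_common_fields pull out:

# Payload shaped the way the accessors above expect; values are made up.
data = {
    'username': 'jdoe',
    'display_name': 'Jane Doe',
    'email': '[email protected]',
    'links': {
        'html': {'href': 'https://bitbucket.org/jdoe/'},
        'avatar': {'href': 'https://bitbucket.org/account/jdoe/avatar/32/'},
    },
}

uid = data['username']                      # 'jdoe'
common = dict(email=data.get('email'),
              username=data.get('username'),
              name=data.get('display_name'))
print(uid, common)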
rancher/validation-tests
tests/v3_validation/cattlevalidationtest/core/test_network_policy.py
2
33949
from common_fixtures import * # NOQA test_network_policy = os.environ.get( 'TEST_NETWORK_POLICY', "False") np_reason = \ 'Intended to not execute this network policy test' if_network_policy = pytest.mark.skipif(test_network_policy != "ALL", reason=np_reason) if_network_policy_none = pytest.mark.skipif( test_network_policy != "NONE", reason=np_reason) if_network_policy_within_stack = pytest.mark.skipif( test_network_policy != "WITHIN_STACK", reason=np_reason) if_network_policy_within_service = pytest.mark.skipif( test_network_policy != "WITHIN_SERVICE", reason=np_reason) if_network_policy_within_linked = pytest.mark.skipif( test_network_policy != "WITHIN_LINKED", reason=np_reason) if_network_policy_groupby = pytest.mark.skipif( test_network_policy != "WITHIN_GROUPBY", reason=np_reason) NETWORKPOLICY_SUBDIR = \ os.path.join(os.path.dirname(os.path.realpath(__file__)), 'resources/networkpolicy') policy_within_stack = {"within": "stack", "action": "allow"} policy_groupby = {"between": {"groupBy": "com.rancher.stack.location"}, "action": "allow"} policy_within_service = {"within": "service", "action": "allow"} policy_within_linked = {"within": "linked", "action": "allow"} shared_environment = {"env": []} @pytest.fixture(scope='session', autouse=True) def create_env_for_network_policy(request, client, socat_containers): assert check_for_network_policy_manager(client) env2 = create_stack_with_service(client, "test2", NETWORKPOLICY_SUBDIR, "stack2.yml", "stack2-rc.yml") assert len(env2.services()) == 6 env1 = create_stack_with_service(client, "test1", NETWORKPOLICY_SUBDIR, "stack1.yml", "stack1-rc.yml") assert len(env1.services()) == 11 create_standalone_containers(client) time.sleep(sleep_interval) populate_env_details(client) def fin(): to_delete = [env1, env2] delete_all(client, to_delete) delete_all(client, shared_environment["containers"]) delete_all(client, shared_environment["containers_with_label"]) request.addfinalizer(fin) def populate_env_details(client): env = client.list_stack(name="test1") assert len(env) == 1 env1 = env[0] env = client.list_stack(name="test2") assert len(env) == 1 env2 = env[0] shared_environment["env"].append(env1) shared_environment["env"].append(env2) shared_environment["stack1_test1allow"] = \ get_service_by_name(client, env1, "test1allow") shared_environment["stack1_test2allow"] = \ get_service_by_name(client, env1, "test2allow") shared_environment["stack1_test3deny"] = \ get_service_by_name(client, env1, "test3deny") shared_environment["stack1_test4deny"] = \ get_service_by_name(client, env1, "test4deny") shared_environment["stack1_lbwithinstack"] = \ get_service_by_name(client, env1, "lbwithininstack") shared_environment["stack1_lbcrossstack"] = \ get_service_by_name(client, env1, "lbcrossstack") shared_environment["stack1_servicewithlinks"] = \ get_service_by_name(client, env1, "servicewithlinks") shared_environment["stack1_servicecrosslinks"] = \ get_service_by_name(client, env1, "servicecrosslinks") shared_environment["stack1_servicelinktosidekick"] = \ get_service_by_name(client, env1, "servicelinktosidekick") shared_environment["stack1_linktowebservice"] = \ get_service_by_name(client, env1, "linktowebservice") shared_environment["stack2_test1allow"] = \ get_service_by_name(client, env2, "test1allow") shared_environment["stack2_test2allow"] = \ get_service_by_name(client, env2, "test2allow") shared_environment["stack2_test3deny"] = \ get_service_by_name(client, env2, "test3deny") shared_environment["stack2_test4deny"] = \ get_service_by_name(client, 
env2, "test4deny") service_with_sidekick = {} service_with_sidekick["p_con1"] = \ get_container_by_name(client, "test2-testp1-1") service_with_sidekick["p_con2"] = \ get_container_by_name(client, "test2-testp1-2") service_with_sidekick["s1_con1"] = \ get_container_by_name(client, "test2-testp1-tests1-1") service_with_sidekick["s1_con2"] = \ get_container_by_name(client, "test2-testp1-tests1-2") service_with_sidekick["s2_con1"] = \ get_container_by_name(client, "test2-testp1-tests2-1") service_with_sidekick["s2_con2"] = \ get_container_by_name(client, "test2-testp1-tests2-2") shared_environment["stack2_sidekick"] = service_with_sidekick time.sleep(sleep_interval) def validate_default_network_action_deny_networkpolicy_allow_within_stacks( client): # Validate that standalone containers are not able reach any # service containers for container in shared_environment["containers"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test2allow"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that there connectivity between containers of different # services within the same stack is allowed validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test2allow"], shared_environment["stack1_test3deny"], shared_environment["stack1_test4deny"]], connection="allow") # Validate that there is no connectivity between containers of different # services across stacks validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"], shared_environment["stack2_test3deny"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that LB is able reach all targets which are in the same stack as # Lb validate_lb_service(client, shared_environment["stack1_lbwithinstack"], "9091", [shared_environment["stack1_test1allow"]]) # Validate that LB is able reach all targets which are in the same stack as # Lb validate_linked_service(client, shared_environment["stack1_servicewithlinks"], [shared_environment["stack1_test1allow"]], "99") # Cross stacks access for links should be denied validate_linked_service(client, shared_environment["stack1_servicecrosslinks"], [shared_environment["stack2_test2allow"]], "98", linkName="test2allow.test2", not_reachable=True) # Cross stacks access for LBs should be denied validate_lb_service_for_no_access( client, shared_environment["stack1_lbcrossstack"], "9090") def validate_default_network_action_deny_networkpolicy_none( client): # Validate that standalone containers are not able reach any # service containers for container in shared_environment["containers"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test2allow"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that there is no connectivity between containers of different # services across stacks and within stacks validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test2allow"], shared_environment["stack1_test3deny"], shared_environment["stack1_test4deny"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"], shared_environment["stack2_test3deny"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that Lb service is not able to reach targets within the # same stack and cross stacks 
validate_lb_service_for_no_access( client, shared_environment["stack1_lbwithinstack"], "9091") validate_lb_service_for_no_access( client, shared_environment["stack1_lbcrossstack"], "9090") # Validate that connectivity between linked service is denied within the # same stack and cross stacks validate_linked_service(client, shared_environment["stack1_servicewithlinks"], [shared_environment["stack1_test1allow"]], "99", not_reachable=True) validate_linked_service(client, shared_environment["stack1_servicecrosslinks"], [shared_environment["stack2_test2allow"]], "98", linkName="test2allow.test2", not_reachable=True) def validate_default_network_action_deny_networkpolicy_groupby( client): # Validate that containers that do not have the labels defined # in group by policy are not allowed to communicate with other # service containers for container in shared_environment["containers"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test2allow"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that stand alone containers that have the labels defined # in group by policy are allowed to communicate with service containers # having the same labels for container in shared_environment["containers_with_label"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test2allow"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"]], connection="allow") # Validate that service containers that have matching labels defined # in group by policy are allowed to communicate with each other validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test2allow"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"]], connection="allow") # Validate that all service containers within the same service that has # group by labels are able to communicate with each other validate_connectivity_between_services( client, shared_environment["stack1_test3deny"], [shared_environment["stack2_test3deny"]], connection="allow") # Validate that service containers that do not have matching labels defined # in group by policy are not allowed to communicate with each other validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test3deny"], shared_environment["stack1_test4deny"], shared_environment["stack2_test3deny"], shared_environment["stack2_test4deny"]], connection="deny") validate_connectivity_between_services( client, shared_environment["stack1_test3deny"], [shared_environment["stack1_test1allow"], shared_environment["stack1_test2allow"], shared_environment["stack1_test4deny"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"], shared_environment["stack2_test4deny"]], connection="deny") def validate_default_network_action_deny_networkpolicy_within_service( client): # Validate that standalone containers are not able reach any # service containers for container in shared_environment["containers"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test1allow"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that containers belonging to the same service are able to # communicate with each other validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test1allow"]], connection="allow") # Validate 
that containers belonging to the different services within # the same stack or cross stack are not able to communicate with each other validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test2allow"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"]], connection="deny") # Validate that Lb services has no access to targets with in # same stacks or cross stacks validate_lb_service_for_no_access( client, shared_environment["stack1_lbcrossstack"], "9090") validate_lb_service_for_no_access( client, shared_environment["stack1_lbwithinstack"], "9091") # Validate that connectivity between linked service is denied within the # same stack and cross stacks validate_linked_service( client, shared_environment["stack1_servicewithlinks"], [shared_environment["stack1_test1allow"]], "99", not_reachable=True) validate_linked_service(client, shared_environment["stack1_servicecrosslinks"], [shared_environment["stack2_test2allow"]], "98", linkName="test2allow.test2", not_reachable=True) def validate_default_network_action_deny_networkpolicy_within_service_for_sk( client): # Validate that containers of primary services are able to connect with # other containers in the same service and containers in other sidekick # services validate_connectivity_between_container_list( client, shared_environment["stack2_sidekick"]["p_con1"], [shared_environment["stack2_sidekick"]["p_con2"], shared_environment["stack2_sidekick"]["s1_con1"], shared_environment["stack2_sidekick"]["s1_con2"], shared_environment["stack2_sidekick"]["s2_con1"], shared_environment["stack2_sidekick"]["s2_con2"]], "allow") # Validate that containers of sidekick services are able to connect with # other containers in the same service and containers in other sidekick # services and primary service validate_connectivity_between_container_list( client, shared_environment["stack2_sidekick"]["s1_con1"], [shared_environment["stack2_sidekick"]["p_con1"], shared_environment["stack2_sidekick"]["p_con2"], shared_environment["stack2_sidekick"]["s1_con2"], shared_environment["stack2_sidekick"]["s2_con1"], shared_environment["stack2_sidekick"]["s2_con2"]], "allow") validate_connectivity_between_container_list( client, shared_environment["stack2_sidekick"]["s2_con1"], [shared_environment["stack2_sidekick"]["p_con1"], shared_environment["stack2_sidekick"]["p_con2"], shared_environment["stack2_sidekick"]["s1_con1"], shared_environment["stack2_sidekick"]["s1_con1"], shared_environment["stack2_sidekick"]["s2_con2"]], "allow") def validate_default_network_action_deny_networkpolicy_within_linked( client): # Validate that standalone containers are not able reach any # service containers for container in shared_environment["containers"]: validate_connectivity_between_con_to_services( client, container, [shared_environment["stack1_test2allow"], shared_environment["stack2_test4deny"]], connection="deny") # Validate that containers belonging to a service are not able to # communicate with other containers in the same service or different # service validate_connectivity_between_services( client, shared_environment["stack1_test1allow"], [shared_environment["stack1_test1allow"], shared_environment["stack1_test2allow"], shared_environment["stack2_test1allow"], shared_environment["stack2_test2allow"]], connection="deny") # Validate that Lb services has access to targets with in # same stacks validate_lb_service(client, shared_environment["stack1_lbwithinstack"], "9091", 
[shared_environment["stack1_test1allow"]]) # Validate that Lb services has access to targets cross stacks validate_lb_service(client, shared_environment["stack1_lbcrossstack"], "9090", [shared_environment["stack2_test1allow"]]) service_with_links = shared_environment["stack1_servicewithlinks"] linked_service = [shared_environment["stack1_test1allow"]] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, linked_service, "99") service_with_links = shared_environment["stack1_servicecrosslinks"] linked_service = [shared_environment["stack2_test1allow"]] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, linked_service, "98", "mylink") def validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, linked_service, port, linkName=None): # Validate that all containers of a service with link has access to # the containers of the service that it is linked to validate_connectivity_between_services( client, service_with_links, linked_service, connection="allow") # Validate that all containers of a service that is linked by other service # has no access to the containers of the service that it is linked by # (s1 -> s2) containers of s2 have no access to s1 for l_service in linked_service: validate_connectivity_between_services( client, l_service, [service_with_links], connection="deny") # Validate that containers are reachable using their link name validate_linked_service(client, service_with_links, linked_service, port, linkName=linkName) def validate_default_network_action_deny_networkpolicy_within_linked_for_sk( client): containers = get_service_container_list( client, shared_environment["stack1_servicelinktosidekick"]) # Validate connectivity between containers of linked services to linked # service with sidekick for con in containers: validate_connectivity_between_container_list( client, con, shared_environment["stack2_sidekick"].values(), "allow") for linked_con in shared_environment["stack2_sidekick"].values(): for con in containers: validate_connectivity_between_containers( client, linked_con, con, "deny") def validate_dna_deny_np_within_linked_for_servicealias( client): # Validate connectivity between containers of linked services to services # linked to webservice validate_connectivity_between_services( client, shared_environment["stack1_linktowebservice"], [shared_environment["stack1_test4deny"], shared_environment["stack2_test3deny"]], connection="allow") validate_connectivity_between_services( client, shared_environment["stack1_test4deny"], [shared_environment["stack1_linktowebservice"]], connection="deny") validate_connectivity_between_services( client, shared_environment["stack2_tes34deny"], [shared_environment["stack1_linktowebservice"]], connection="deny") @if_network_policy def test_default_network_action_deny_networkpolicy_allow_within_stacks( client): set_network_policy(client, "deny", policy_within_stack) validate_default_network_action_deny_networkpolicy_allow_within_stacks( client) @if_network_policy_within_stack def test_dna_deny_np_allow_within_stacks_stop_service( client, socat_containers): set_network_policy(client, "deny", policy_within_stack) stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_allow_within_stacks( client) @if_network_policy_within_stack def test_dna_deny_np_allow_within_stacks_delete_service( client, socat_containers): set_network_policy(client, "deny", 
policy_within_stack) delete_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_allow_within_stacks( client) @if_network_policy_within_stack def test_dna_deny_np_allow_within_stacks_restart_service( client, socat_containers): set_network_policy(client, "deny", policy_within_stack) restart_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_allow_within_stacks( client) @if_network_policy def test_default_network_action_deny_networkpolicy_none(client): set_network_policy(client, "deny") validate_default_network_action_deny_networkpolicy_none( client) @if_network_policy_none def test_dna_deny_np_none_stop_service( client, socat_containers): set_network_policy(client, "deny") stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_none( client) @if_network_policy_none def test_dna_deny_np_none_delete_service( client, socat_containers): set_network_policy(client, "deny") delete_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_none( client) @if_network_policy_none def test_dna_deny_np_none_restart_service( client, socat_containers): set_network_policy(client, "deny") restart_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_none( client) @if_network_policy def test_default_network_action_deny_networkpolicy_groupby( client): set_network_policy(client, "deny", policy_groupby) validate_default_network_action_deny_networkpolicy_groupby( client) @if_network_policy_groupby def test_dna_deny_np_groupby_stop_service( client, socat_containers): set_network_policy(client, "deny", policy_groupby) stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_groupby( client) @if_network_policy_groupby def test_dna_deny_np_groupby_delete_service( client, socat_containers): set_network_policy(client, "deny", policy_groupby) delete_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_groupby( client) @if_network_policy_groupby def test_dna_deny_np_groupby_restart_service( client, socat_containers): set_network_policy(client, "deny", policy_groupby) restart_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) validate_default_network_action_deny_networkpolicy_groupby( client) @if_network_policy def test_default_network_action_deny_networkpolicy_allow_within_service( client): set_network_policy(client, "deny", policy_within_service) validate_default_network_action_deny_networkpolicy_within_service( client) @if_network_policy_within_service def test_dna_deny_np_allow_within_service_delete_service( client): set_network_policy(client, "deny", policy_within_service) delete_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) delete_service_instances(client, shared_environment["env"][0], shared_environment["stack1_lbcrossstack"], [1]) delete_service_instances(client, shared_environment["env"][0], 
shared_environment["stack1_lbwithinstack"], [1]) delete_service_instances( client, shared_environment["env"][0], shared_environment["stack1_servicewithlinks"], [1]) validate_default_network_action_deny_networkpolicy_within_service( client) @if_network_policy_within_service def test_dna_deny_np_allow_within_service_scale_service( client): set_network_policy(client, "deny", policy_within_service) scale_service(shared_environment["stack1_test1allow"], client, 3) scale_service(shared_environment["stack1_lbcrossstack"], client, 3) scale_service(shared_environment["stack1_lbwithinstack"], client, 3) scale_service(shared_environment["stack1_servicewithlinks"], client, 3) populate_env_details(client) validate_default_network_action_deny_networkpolicy_within_service( client) scale_service(shared_environment["stack1_test1allow"], client, 2) scale_service(shared_environment["stack1_lbcrossstack"], client, 2) scale_service(shared_environment["stack1_lbwithinstack"], client, 2) scale_service(shared_environment["stack1_servicewithlinks"], client, 2) @if_network_policy_within_service def test_dna_deny_np_allow_within_service_stop_service( client): set_network_policy(client, "deny", policy_within_service) validate_default_network_action_deny_networkpolicy_within_service( client) stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_test1allow"], [1]) stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_lbcrossstack"], [1]) stop_service_instances(client, shared_environment["env"][0], shared_environment["stack1_lbwithinstack"], [1]) stop_service_instances( client, shared_environment["env"][0], shared_environment["stack1_servicewithlinks"], [1]) validate_default_network_action_deny_networkpolicy_within_service( client) @if_network_policy def test_dna_deny_np_allow_within_service_check_sidekicks( client): set_network_policy(client, "deny", policy_within_service) validate_default_network_action_deny_networkpolicy_within_service_for_sk( client) @if_network_policy def test_default_network_action_deny_networkpolicy_allow_within_linked( client): set_network_policy(client, "deny", policy_within_linked) validate_default_network_action_deny_networkpolicy_within_linked( client) @if_network_policy def test_dna_deny_np_allow_within_linked_for_sk( client): set_network_policy(client, "deny", policy_within_linked) validate_default_network_action_deny_networkpolicy_within_linked_for_sk( client) @if_network_policy def test_dna_deny_np_allow_within_linked_for_sa( client): set_network_policy(client, "deny", policy_within_linked) validate_dna_deny_np_within_linked_for_servicealias( client) @if_network_policy_within_linked def test_dna_deny_np_allow_within_linked_after_scaleup( client): set_network_policy(client, "deny", policy_within_linked) service_with_links = shared_environment["stack1_servicewithlinks"] linked_service = shared_environment["stack1_test1allow"] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, [linked_service], "99") scale_service(linked_service, client, 3) shared_environment["stack1_test1allow"] = \ get_service_by_name(client, shared_environment["env"][0], "test1allow") linked_service = shared_environment["stack1_test1allow"] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, [linked_service], "99") scale_service(linked_service, client, 2) shared_environment["stack1_test1allow"] = \ get_service_by_name(client, shared_environment["env"][0], "test1allow") linked_service = 
shared_environment["stack1_test1allow"] scale_service(service_with_links, client, 3) shared_environment["stack1_servicewithlinks"] = \ get_service_by_name(client, shared_environment["env"][0], "servicewithlinks") service_with_links = shared_environment["stack1_servicewithlinks"] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, [linked_service], "99") scale_service(service_with_links, client, 2) shared_environment["stack1_servicewithlinks"] = \ get_service_by_name(client, shared_environment["env"][0], "servicewithlinks") @if_network_policy_within_linked def test_dna_deny_np_allow_within_linked_after_adding_removing_links( client): set_network_policy(client, "deny", policy_within_linked) service_with_links = shared_environment["stack1_servicewithlinks"] linked_service = [shared_environment["stack1_test1allow"]] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, linked_service, "99") # Add another service link service_with_links.setservicelinks( serviceLinks=[ {"serviceId": shared_environment["stack1_test1allow"].id}, {"serviceId": shared_environment["stack1_test2allow"].id}]) validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, [shared_environment["stack1_test1allow"]], "99") validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, [shared_environment["stack1_test2allow"]], "99") # Remove existing service link service_with_links.setservicelinks( serviceLinks=[ {"serviceId": shared_environment["stack1_test1allow"].id}]) linked_service = [shared_environment["stack1_test1allow"]] validate_dna_deny_np_within_linked_for_linked_service( client, service_with_links, linked_service, "99") validate_connectivity_between_services( client, service_with_links, [shared_environment["stack1_test2allow"]], connection="deny") validate_connectivity_between_services( client, shared_environment["stack1_test2allow"], [service_with_links], connection="deny") def scale_service(service, client, final_scale): service = client.update(service, name=service.name, scale=final_scale) service = client.wait_success(service, 300) assert service.state == "active" assert service.scale == final_scale check_container_in_service(client, service) def set_network_policy(client, defaultPolicyAction="allow", policy=None): networks = client.list_network(name='ipsec') assert len(networks) == 1 network = networks[0] network = client.update( network, defaultPolicyAction=defaultPolicyAction, policy=policy) network = wait_success(client, network) assert network.defaultPolicyAction == defaultPolicyAction populate_env_details(client) def check_for_network_policy_manager(client): np_manager = False env = client.list_stack(name="network-policy-manager") if len(env) == 1: service = get_service_by_name(client, env[0], "network-policy-manager") if service.state == "active": np_manager = True return np_manager def create_standalone_containers(client): hosts = client.list_host(kind='docker', removed_null=True) cons = [] cons_with_label = [] for host in hosts: con_name = random_str() con = client.create_container( name=con_name, ports=['3001:22'], image=HEALTH_CHECK_IMAGE_UUID, networkMode=MANAGED_NETWORK, requestedHostId=host.id) con = client.wait_success(con) assert con.state == "running" cons.append(con) shared_environment["containers"] = cons for host in hosts: con_name = random_str() con = client.create_container( name=con_name, ports=['3002:22'], image=HEALTH_CHECK_IMAGE_UUID, networkMode=MANAGED_NETWORK, 
requestedHostId=host.id, labels={"com.rancher.stack.location": "east"}) con = client.wait_success(con) assert con.state == "running" cons_with_label.append(con) shared_environment["containers_with_label"] = cons_with_label
apache-2.0
-7,541,229,297,950,756,000
40.655215
79
0.659813
false
michelts/lettuce
tests/integration/test_brocolis.py
17
2167
# -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import commands

from lettuce.fs import FileSystem
from nose.tools import assert_equals
from tests.util import run_scenario

current_directory = FileSystem.dirname(__file__)


@FileSystem.in_directory(current_directory, 'django', 'brocolis')
def test_harvest_with_debug_mode_enabled():
    'python manage.py harvest -d turns settings.DEBUG=True'

    for option in ['-d', '--debug-mode']:
        status, out = run_scenario('leaves', 'enabled', **{option: None})
        assert_equals(status, 0, out)


@FileSystem.in_directory(current_directory, 'django', 'brocolis')
def test_harvest_with_debug_mode_disabled():
    'python manage.py harvest without turns settings.DEBUG=False'

    status, out = run_scenario('leaves', 'disabled')
    assert_equals(status, 0, out)


@FileSystem.in_directory(current_directory, 'django', 'brocolis')
def test_harvest_sets_environment_variabled_for_gae():
    'harvest sets environment variables SERVER_NAME and SERVER_PORT in order to work with google app engine'

    status, out = run_scenario('leaves', 'appengine')
    assert_equals(status, 0, out)


@FileSystem.in_directory(current_directory, 'django', 'brocolis')
def test_harvest_uses_test_runner():
    'harvest uses TEST_RUNNER specified in settings'

    status, out = run_scenario('leaves', 'disabled')
    assert_equals(status, 0, out)
    assert "Custom test runner enabled." in out
gpl-3.0
4,176,015,016,514,085,400
37
108
0.731764
false
Chrispassold/ionicons
builder/generate.py
357
9438
from subprocess import call
import os
import json

BUILDER_PATH = os.path.dirname(os.path.abspath(__file__))
ROOT_PATH = os.path.join(BUILDER_PATH, '..')
FONTS_FOLDER_PATH = os.path.join(ROOT_PATH, 'fonts')
CSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'css')
SCSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'scss')
LESS_FOLDER_PATH = os.path.join(ROOT_PATH, 'less')


def main():
    generate_font_files()

    data = get_build_data()

    rename_svg_glyph_names(data)
    generate_scss(data)
    generate_less(data)
    generate_cheatsheet(data)
    generate_component_json(data)
    generate_composer_json(data)
    generate_bower_json(data)


def generate_font_files():
    print "Generate Fonts"
    cmd = "fontforge -script %s/scripts/generate_font.py" % (BUILDER_PATH)
    call(cmd, shell=True)


def rename_svg_glyph_names(data):
    # hacky and slow (but safe) way to rename glyph-name attributes
    svg_path = os.path.join(FONTS_FOLDER_PATH, 'ionicons.svg')
    svg_file = open(svg_path, 'r+')
    svg_text = svg_file.read()
    svg_file.seek(0)

    for ionicon in data['icons']:
        # uniF2CA
        org_name = 'uni%s' % (ionicon['code'].replace('0x', '').upper())
        ion_name = 'ion-%s' % (ionicon['name'])
        svg_text = svg_text.replace(org_name, ion_name)

    svg_file.write(svg_text)
    svg_file.close()


def generate_less(data):
    print "Generate LESS"
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-variables.less')
    icons_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-icons.less')

    d = []
    d.append('/*!');
    d.append('Ionicons, v%s' % (font_version) );
    d.append('Created by Ben Sperry for the Ionic Framework, http://ionicons.com/');
    d.append('https://twitter.com/benjsperry https://twitter.com/ionicframework');
    d.append('MIT License: https://github.com/driftyco/ionicons');
    d.append('*/');
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('@ionicons-font-path: "../fonts";')
    d.append('@ionicons-font-family: "%s";' % (font_name) )
    d.append('@ionicons-version: "%s";' % (font_version) )
    d.append('@ionicons-prefix: %s;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('@ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    f = open(variables_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()

    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')

    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.@{ionicons-prefix}%s:before' % (ionicon['name']) )

    d.append( ',\n'.join(group) )
    d.append('{')
    d.append(' &:extend(.ion);')
    d.append('}')

    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.@{ionicons-prefix}%s:before { content: @ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )

    f = open(icons_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()


def generate_scss(data):
    print "Generate SCSS"
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-variables.scss')
    icons_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-icons.scss')

    d = []
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('$ionicons-font-path: "../fonts" !default;')
    d.append('$ionicons-font-family: "%s" !default;' % (font_name) )
    d.append('$ionicons-version: "%s" !default;' % (font_version) )
    d.append('$ionicons-prefix: %s !default;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('$ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    f = open(variables_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()

    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')

    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.#{$ionicons-prefix}%s:before' % (ionicon['name']) )

    d.append( ',\n'.join(group) )
    d.append('{')
    d.append(' @extend .ion;')
    d.append('}')

    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.#{$ionicons-prefix}%s:before { content: $ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )

    f = open(icons_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()

    generate_css_from_scss(data)


def generate_css_from_scss(data):
    print "Generate CSS From SCSS"
    scss_file_path = os.path.join(SCSS_FOLDER_PATH, 'ionicons.scss')
    css_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.css')
    css_min_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.min.css')

    cmd = "sass %s %s --style compact" % (scss_file_path, css_file_path)
    call(cmd, shell=True)

    print "Generate Minified CSS From SCSS"
    cmd = "sass %s %s --style compressed" % (scss_file_path, css_min_file_path)
    call(cmd, shell=True)


def generate_cheatsheet(data):
    print "Generate Cheatsheet"
    cheatsheet_file_path = os.path.join(ROOT_PATH, 'cheatsheet.html')
    template_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'template.html')
    icon_row_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'icon-row.html')

    f = open(template_path, 'r')
    template_html = f.read()
    f.close()

    f = open(icon_row_path, 'r')
    icon_row_template = f.read()
    f.close()

    content = []

    for ionicon in data['icons']:
        css_code = ionicon['code'].replace('0x', '\\')
        escaped_html_code = ionicon['code'].replace('0x', '&amp;#x') + ';'
        html_code = ionicon['code'].replace('0x', '&#x') + ';'

        item_row = icon_row_template
        item_row = item_row.replace('{{name}}', ionicon['name'])
        item_row = item_row.replace('{{prefix}}', data['prefix'])
        item_row = item_row.replace('{{css_code}}', css_code)
        item_row = item_row.replace('{{escaped_html_code}}', escaped_html_code)
        item_row = item_row.replace('{{html_code}}', html_code)

        content.append(item_row)

    template_html = template_html.replace("{{font_name}}", data["name"])
    template_html = template_html.replace("{{font_version}}", data["version"])
    template_html = template_html.replace("{{icon_count}}", str(len(data["icons"])) )
    template_html = template_html.replace("{{content}}", '\n'.join(content) )

    f = open(cheatsheet_file_path, 'w')
    f.write(template_html)
    f.close()


def generate_component_json(data):
    print "Generate component.json"
    d = {
        "name": data['name'],
        "repo": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "version": data['version'],
        "keywords": [],
        "dependencies": {},
        "development": {},
        "license": "MIT",
        "styles": [
            "css/%s.css" % (data['name'].lower())
        ],
        "fonts": [
            "fonts/%s.eot" % (data['name'].lower()),
            "fonts/%s.svg" % (data['name'].lower()),
            "fonts/%s.ttf" % (data['name'].lower()),
            "fonts/%s.woff" % (data['name'].lower())
        ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))

    component_file_path = os.path.join(ROOT_PATH, 'component.json')
    f = open(component_file_path, 'w')
    f.write(txt)
    f.close()


def generate_composer_json(data):
    print "Generate composer.json"
    d = {
        "name": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "homepage": "http://ionicons.com/",
        "authors": [
            {
                "name": "Ben Sperry",
                "email": "[email protected]",
                "role": "Designer",
                "homepage": "https://twitter.com/benjsperry"
            },
            {
                "name": "Adam Bradley",
                "email": "[email protected]",
                "role": "Developer",
                "homepage": "https://twitter.com/adamdbradley"
            },
            {
                "name": "Max Lynch",
                "email": "[email protected]",
                "role": "Developer",
                "homepage": "https://twitter.com/maxlynch"
            }
        ],
        "extra": {},
        "license": [ "MIT" ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))

    composer_file_path = os.path.join(ROOT_PATH, 'composer.json')
    f = open(composer_file_path, 'w')
    f.write(txt)
    f.close()


def generate_bower_json(data):
    print "Generate bower.json"
    d = {
        "name": data['name'],
        "version": data['version'],
        "homepage": "https://github.com/driftyco/ionicons",
        "authors": [
            "Ben Sperry <[email protected]>",
            "Adam Bradley <[email protected]>",
            "Max Lynch <[email protected]>"
        ],
        "description": "Ionicons - free and beautiful icons from the creators of Ionic Framework",
        "main": [
            "css/%s.css" % (data['name'].lower()),
            "fonts/*"
        ],
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "license": "MIT",
        "ignore": [
            "**/.*",
            "builder",
            "node_modules",
            "bower_components",
            "test",
            "tests"
        ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))

    bower_file_path = os.path.join(ROOT_PATH, 'bower.json')
    f = open(bower_file_path, 'w')
    f.write(txt)
    f.close()


def get_build_data():
    build_data_path = os.path.join(BUILDER_PATH, 'build_data.json')
    f = open(build_data_path, 'r')
    data = json.loads(f.read())
    f.close()
    return data


if __name__ == "__main__":
    main()
mit
7,055,744,480,035,983,000
28.679245
113
0.593346
false
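To make the string munging in generate_less, generate_scss and generate_cheatsheet above easier to follow, here is a minimal sketch of how one icon entry is expanded; the icon name and code point are hypothetical, but the replace() calls are the ones used in the script:

    ionicon = {'name': 'alert', 'code': '0xf101'}    # hypothetical build_data.json entry

    chr_code = ionicon['code'].replace('0x', '\\')   # backslash escape used as the CSS content value
    less_var = '@ionicon-var-%s: "%s";' % (ionicon['name'], chr_code)
    html_code = ionicon['code'].replace('0x', '&#x') + ';'

    print less_var    # @ionicon-var-alert: "\f101";
    print html_code   # &#xf101;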
dvliman/jaikuengine
.google_appengine/lib/django-1.4/django/contrib/gis/gdal/datasource.py
92
4724
""" DataSource is a wrapper for the OGR Data Source object, which provides an interface for reading vector geometry data from many different file formats (including ESRI shapefiles). When instantiating a DataSource object, use the filename of a GDAL-supported data source. For example, a SHP file or a TIGER/Line file from the government. The ds_driver keyword is used internally when a ctypes pointer is passed in directly. Example: ds = DataSource('/home/foo/bar.shp') for layer in ds: for feature in layer: # Getting the geometry for the feature. g = feature.geom # Getting the 'description' field for the feature. desc = feature['description'] # We can also increment through all of the fields # attached to this feature. for field in feature: # Get the name of the field (e.g. 'description') nm = field.name # Get the type (integer) of the field, e.g. 0 => OFTInteger t = field.type # Returns the value the field; OFTIntegers return ints, # OFTReal returns floats, all else returns string. val = field.value """ # ctypes prerequisites. from ctypes import byref # The GDAL C library, OGR exceptions, and the Layer object. from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import OGRException, OGRIndexError from django.contrib.gis.gdal.layer import Layer # Getting the ctypes prototypes for the DataSource. from django.contrib.gis.gdal.prototypes import ds as capi # For more information, see the OGR C API source code: # http://www.gdal.org/ogr/ogr__api_8h.html # # The OGR_DS_* routines are relevant here. class DataSource(GDALBase): "Wraps an OGR Data Source object." #### Python 'magic' routines #### def __init__(self, ds_input, ds_driver=False, write=False): # The write flag. if write: self._write = 1 else: self._write = 0 # Registering all the drivers, this needs to be done # _before_ we try to open up a data source. if not capi.get_driver_count(): capi.register_all() if isinstance(ds_input, basestring): # The data source driver is a void pointer. ds_driver = Driver.ptr_type() try: # OGROpen will auto-detect the data source type. ds = capi.open_ds(ds_input, self._write, byref(ds_driver)) except OGRException: # Making the error message more clear rather than something # like "Invalid pointer returned from OGROpen". raise OGRException('Could not open the datasource at "%s"' % ds_input) elif isinstance(ds_input, self.ptr_type) and isinstance(ds_driver, Driver.ptr_type): ds = ds_input else: raise OGRException('Invalid data source input type: %s' % type(ds_input)) if bool(ds): self.ptr = ds self.driver = Driver(ds_driver) else: # Raise an exception if the returned pointer is NULL raise OGRException('Invalid data source file "%s"' % ds_input) def __del__(self): "Destroys this DataStructure object." if self._ptr: capi.destroy_ds(self._ptr) def __iter__(self): "Allows for iteration over the layers in a data source." for i in xrange(self.layer_count): yield self[i] def __getitem__(self, index): "Allows use of the index [] operator to get a layer at the index." if isinstance(index, basestring): l = capi.get_layer_by_name(self.ptr, index) if not l: raise OGRIndexError('invalid OGR Layer name given: "%s"' % index) elif isinstance(index, int): if index < 0 or index >= self.layer_count: raise OGRIndexError('index out of range') l = capi.get_layer(self._ptr, index) else: raise TypeError('Invalid index type: %s' % type(index)) return Layer(l, self) def __len__(self): "Returns the number of layers within the data source." 
return self.layer_count def __str__(self): "Returns OGR GetName and Driver for the Data Source." return '%s (%s)' % (self.name, str(self.driver)) @property def layer_count(self): "Returns the number of layers in the data source." return capi.get_layer_count(self._ptr) @property def name(self): "Returns the name of the data source." return capi.get_ds_name(self._ptr)
apache-2.0
-1,417,176,880,735,346,400
35.90625
92
0.617273
false
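As a complement to the iteration example in the DataSource docstring above, a short sketch of layer lookup through the __getitem__ and __len__ methods shown in the class (the shapefile path and layer name are hypothetical):

    ds = DataSource('/home/foo/bar.shp')
    print len(ds)            # number of layers, via layer_count
    first_layer = ds[0]      # integer index; raises OGRIndexError when out of range
    named_layer = ds['bar']  # lookup by layer name; raises OGRIndexError if the name is unknown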
Rogentos/legacy-anaconda
storage/devicelibs/crypto.py
2
6138
#
# crypto.py
#
# Copyright (C) 2009 Red Hat, Inc.  All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Dave Lehman <[email protected]>
#            Martin Sivak <[email protected]>
#

import os
from pycryptsetup import CryptSetup

import iutil
from ..errors import *

import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)

# Keep the character set size a power of two to make sure all characters are
# equally likely
GENERATED_PASSPHRASE_CHARSET = ("0123456789"
                                "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                                "abcdefghijklmnopqrstuvwxyz"
                                "./")
# 20 chars * 6 bits per char = 120 "bits of security"
GENERATED_PASSPHRASE_LENGTH = 20

def generateBackupPassphrase():
    rnd = os.urandom(GENERATED_PASSPHRASE_LENGTH)
    cs = GENERATED_PASSPHRASE_CHARSET
    raw = "".join([cs[ord(c) % len(cs)] for c in rnd])

    # Make the result easier to read
    parts = []
    for i in xrange(0, len(raw), 5):
        parts.append(raw[i : i + 5])
    return "-".join(parts)

def askyes(question):
    return True

def dolog(priority, text):
    pass

def is_luks(device):
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    return cs.isLuks(device)

def luks_uuid(device):
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    return cs.luksUUID(device).strip()

def luks_status(name):
    """True means active, False means inactive (or non-existent)"""
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    return cs.luksStatus(name)!=0

def luks_format(device, passphrase=None, key_file=None, cipher=None, key_size=None):
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    key_file_unlink = False

    if passphrase:
        key_file = cs.prepare_passphrase_file(passphrase)
        key_file_unlink = True
    elif key_file and os.path.isfile(key_file):
        pass
    else:
        raise ValueError("luks_format requires either a passphrase or a key file")

    #None is not considered as default value and pycryptsetup doesn't accept it
    #so we need to filter out all Nones
    kwargs = {}
    kwargs["device"] = device
    if cipher:
        kwargs["cipher"] = cipher
    if key_file:
        kwargs["keyfile"] = key_file
    if key_size:
        kwargs["keysize"] = key_size

    rc = cs.luksFormat(**kwargs)

    if key_file_unlink:
        os.unlink(key_file)
    if rc:
        raise CryptoError("luks_format failed for '%s'" % device)

def luks_open(device, name, passphrase=None, key_file=None):
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    key_file_unlink = False

    if passphrase:
        key_file = cs.prepare_passphrase_file(passphrase)
        key_file_unlink = True
    elif key_file and os.path.isfile(key_file):
        pass
    else:
        raise ValueError("luks_open requires either a passphrase or a key file")

    rc = cs.luksOpen(device = device, name = name, keyfile = key_file)
    if key_file_unlink:
        os.unlink(key_file)
    if rc:
        raise CryptoError("luks_open failed for %s (%s)" % (device, name))

def luks_close(name):
    cs = CryptSetup(yesDialog = askyes, logFunc = dolog)
    rc = cs.luksClose(name)
    if rc:
        raise CryptoError("luks_close failed for %s" % name)

def luks_add_key(device, new_passphrase=None, new_key_file=None, passphrase=None, key_file=None):
    params = ["-q"]

    p = os.pipe()
    if passphrase:
        os.write(p[1], "%s\n" % passphrase)
    elif key_file and os.path.isfile(key_file):
        params.extend(["--key-file", key_file])
    else:
        raise CryptoError("luks_add_key requires either a passphrase or a key file")

    params.extend(["luksAddKey", device])

    if new_passphrase:
        os.write(p[1], "%s\n" % new_passphrase)
    elif new_key_file and os.path.isfile(new_key_file):
        params.append("%s" % new_key_file)
    else:
        raise CryptoError("luks_add_key requires either a passphrase or a key file to add")

    os.close(p[1])

    rc = iutil.execWithRedirect("cryptsetup", params,
                                stdin = p[0],
                                stdout = "/dev/tty5",
                                stderr = "/dev/tty5")

    os.close(p[0])
    if rc:
        raise CryptoError("luks add key failed with errcode %d" % (rc,))

def luks_remove_key(device, del_passphrase=None, del_key_file=None, passphrase=None, key_file=None):
    params = []

    p = os.pipe()
    if del_passphrase: #the first question is about the key we want to remove
        os.write(p[1], "%s\n" % del_passphrase)

    if passphrase:
        os.write(p[1], "%s\n" % passphrase)
    elif key_file and os.path.isfile(key_file):
        params.extend(["--key-file", key_file])
    else:
        raise CryptoError("luks_remove_key requires either a passphrase or a key file")

    params.extend(["luksRemoveKey", device])

    if del_passphrase:
        pass
    elif del_key_file and os.path.isfile(del_key_file):
        params.append("%s" % del_key_file)
    else:
        raise CryptoError("luks_remove_key requires either a passphrase or a key file to remove")

    os.close(p[1])

    rc = iutil.execWithRedirect("cryptsetup", params,
                                stdin = p[0],
                                stdout = "/dev/tty5",
                                stderr = "/dev/tty5")

    os.close(p[0])
    if rc:
        raise CryptoError("luks_remove_key failed with errcode %d" % (rc,))
gpl-2.0
-6,418,685,966,033,070,000
30.803109
97
0.620398
false
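A minimal usage sketch for the LUKS helpers defined above, assuming a hypothetical block device; the call signatures are the ones from the module, and CryptoError/ValueError handling is omitted for brevity:

    passphrase = generateBackupPassphrase()              # e.g. 'AbCdE-fGhIj-KlMnO-pQrSt'
    luks_format('/dev/sda2', passphrase=passphrase)      # device path is hypothetical
    luks_open('/dev/sda2', 'luks-sda2', passphrase=passphrase)
    luks_close('luks-sda2')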
eunchong/build
scripts/slave/recipe_modules/skia/ios_flavor.py
1
5292
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import copy
import default_flavor

"""iOS flavor utils, used for building for and running tests on iOS."""

class iOSFlavorUtils(default_flavor.DefaultFlavorUtils):
  def __init__(self, skia_api):
    super(iOSFlavorUtils, self).__init__(skia_api)
    self.ios_bin = self._skia_api.m.path['slave_build'].join(
        'skia', 'platform_tools', 'ios', 'bin')

  def step(self, name, cmd, **kwargs):
    args = [self.ios_bin.join('ios_run_skia')]

    # Convert 'dm' and 'nanobench' from positional arguments
    # to flags, which is what iOSShell expects to select which
    # one is being run.
    cmd = ["--" + c if c in ['dm', 'nanobench'] else c for c in cmd]
    return self._skia_api.run(self._skia_api.m.step, name=name,
                              cmd=args + cmd, **kwargs)

  def compile(self, target):
    """Build the given target."""
    cmd = [self.ios_bin.join('ios_ninja')]
    self._skia_api.run(self._skia_api.m.step, 'build iOSShell', cmd=cmd,
                       cwd=self._skia_api.m.path['checkout'])

  def device_path_join(self, *args):
    """Like os.path.join(), but for paths on a connected iOS device."""
    return '/'.join(args)

  def device_path_exists(self, path):
    """Like os.path.exists(), but for paths on a connected device."""
    return self._skia_api.run(
        self._skia_api.m.step,
        'exists %s' % path,
        cmd=[self.ios_bin.join('ios_path_exists'), path],
        infra_step=True,
    )  # pragma: no cover

  def _remove_device_dir(self, path):
    """Remove the directory on the device."""
    return self._skia_api.run(
        self._skia_api.m.step,
        'rmdir %s' % path,
        cmd=[self.ios_bin.join('ios_rm'), path],
        infra_step=True,
    )

  def _create_device_dir(self, path):
    """Create the directory on the device."""
    return self._skia_api.run(
        self._skia_api.m.step,
        'mkdir %s' % path,
        cmd=[self.ios_bin.join('ios_mkdir'), path],
        infra_step=True,
    )

  def copy_directory_contents_to_device(self, host_dir, device_dir):
    """Like shutil.copytree(), but for copying to a connected device."""
    return self._skia_api.run(
        self._skia_api.m.step,
        name='push %s to %s' % (self._skia_api.m.path.basename(host_dir),
                                self._skia_api.m.path.basename(device_dir)),
        cmd=[self.ios_bin.join('ios_push_if_needed'), host_dir, device_dir],
        infra_step=True,
    )

  def copy_directory_contents_to_host(self, device_dir, host_dir):
    """Like shutil.copytree(), but for copying from a connected device."""
    self._skia_api.run(
        self._skia_api.m.step,
        name='pull %s' % self._skia_api.m.path.basename(device_dir),
        cmd=[self.ios_bin.join('ios_pull_if_needed'), device_dir, host_dir],
        infra_step=True,
    )

  def copy_file_to_device(self, host_path, device_path):
    """Like shutil.copyfile, but for copying to a connected device."""
    self._skia_api.run(
        self._skia_api.m.step,
        name='push %s' % host_path,
        cmd=[self.ios_bin.join('ios_push_file'), host_path, device_path],
        infra_step=True,
    )  # pragma: no cover

  def create_clean_device_dir(self, path):
    """Like shutil.rmtree() + os.makedirs(), but on a connected device."""
    self._remove_device_dir(path)
    self._create_device_dir(path)

  def install(self):
    """Run device-specific installation steps."""
    self._skia_api.run(
        self._skia_api.m.step,
        name='install iOSShell',
        cmd=[self.ios_bin.join('ios_install')],
        infra_step=True)

  def cleanup_steps(self):
    """Run any device-specific cleanup steps."""
    if self._skia_api.do_test_steps or self._skia_api.do_perf_steps:
      self._skia_api.run(
          self._skia_api.m.step,
          name='reboot',
          cmd=[self.ios_bin.join('ios_restart')],
          infra_step=True)
      self._skia_api.run(
          self._skia_api.m.step,
          name='wait for reboot',
          cmd=['sleep', '20'],
          infra_step=True)

  def read_file_on_device(self, path):
    """Read the given file."""
    ret = self._skia_api.run(
        self._skia_api.m.step,
        name='read %s' % self._skia_api.m.path.basename(path),
        cmd=[self.ios_bin.join('ios_cat_file'), path],
        stdout=self._skia_api.m.raw_io.output(),
        infra_step=True)
    return ret.stdout.rstrip() if ret.stdout else ret.stdout

  def remove_file_on_device(self, path):
    """Remove the file on the device."""
    return self._skia_api.run(
        self._skia_api.m.step,
        'rm %s' % path,
        cmd=[self.ios_bin.join('ios_rm'), path],
        infra_step=True,
    )

  def get_device_dirs(self):
    """ Set the directories which will be used by the build steps."""
    prefix = self.device_path_join('skiabot', 'skia_')
    return default_flavor.DeviceDirs(
        dm_dir=prefix + 'dm',
        perf_data_dir=prefix + 'perf',
        resource_dir=prefix + 'resources',
        images_dir=prefix + 'images',
        skp_dir=prefix + 'skp/skps',
        tmp_dir=prefix + 'tmp_dir')
bsd-3-clause
-1,379,111,132,624,642,300
33.815789
79
0.595049
false
hipnusleo/laserjet
resource/pypi/cryptography-1.7.1/src/_cffi_src/openssl/engine.py
1
5445
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

INCLUDES = """
#include <openssl/engine.h>
"""

TYPES = """
static const long Cryptography_HAS_ENGINE_CRYPTODEV;

typedef ... ENGINE;
typedef ... RSA_METHOD;
typedef ... DSA_METHOD;
typedef ... DH_METHOD;
typedef struct {
    int (*bytes)(unsigned char *, int);
    int (*pseudorand)(unsigned char *, int);
    int (*status)();
    ...;
} RAND_METHOD;
typedef int (*ENGINE_GEN_INT_FUNC_PTR)(ENGINE *);
typedef ... *ENGINE_CTRL_FUNC_PTR;
typedef ... *ENGINE_LOAD_KEY_PTR;
typedef ... *ENGINE_CIPHERS_PTR;
typedef ... *ENGINE_DIGESTS_PTR;
typedef ... ENGINE_CMD_DEFN;
typedef ... UI_METHOD;

static const unsigned int ENGINE_METHOD_RSA;
static const unsigned int ENGINE_METHOD_DSA;
static const unsigned int ENGINE_METHOD_RAND;
static const unsigned int ENGINE_METHOD_CIPHERS;
static const unsigned int ENGINE_METHOD_DIGESTS;
static const unsigned int ENGINE_METHOD_ALL;
static const unsigned int ENGINE_METHOD_NONE;

static const int ENGINE_R_CONFLICTING_ENGINE_ID;
"""

FUNCTIONS = """
ENGINE *ENGINE_get_first(void);
ENGINE *ENGINE_get_last(void);
ENGINE *ENGINE_get_next(ENGINE *);
ENGINE *ENGINE_get_prev(ENGINE *);
int ENGINE_add(ENGINE *);
int ENGINE_remove(ENGINE *);
ENGINE *ENGINE_by_id(const char *);
int ENGINE_init(ENGINE *);
int ENGINE_finish(ENGINE *);
void ENGINE_load_builtin_engines(void);
ENGINE *ENGINE_get_default_RSA(void);
ENGINE *ENGINE_get_default_DSA(void);
ENGINE *ENGINE_get_default_DH(void);
ENGINE *ENGINE_get_default_RAND(void);
ENGINE *ENGINE_get_cipher_engine(int);
ENGINE *ENGINE_get_digest_engine(int);
int ENGINE_set_default_RSA(ENGINE *);
int ENGINE_set_default_DSA(ENGINE *);
int ENGINE_set_default_DH(ENGINE *);
int ENGINE_set_default_RAND(ENGINE *);
int ENGINE_set_default_ciphers(ENGINE *);
int ENGINE_set_default_digests(ENGINE *);
int ENGINE_set_default_string(ENGINE *, const char *);
int ENGINE_set_default(ENGINE *, unsigned int);
unsigned int ENGINE_get_table_flags(void);
void ENGINE_set_table_flags(unsigned int);
int ENGINE_register_RSA(ENGINE *);
void ENGINE_unregister_RSA(ENGINE *);
void ENGINE_register_all_RSA(void);
int ENGINE_register_DSA(ENGINE *);
void ENGINE_unregister_DSA(ENGINE *);
void ENGINE_register_all_DSA(void);
int ENGINE_register_DH(ENGINE *);
void ENGINE_unregister_DH(ENGINE *);
void ENGINE_register_all_DH(void);
int ENGINE_register_RAND(ENGINE *);
void ENGINE_unregister_RAND(ENGINE *);
void ENGINE_register_all_RAND(void);
int ENGINE_register_ciphers(ENGINE *);
void ENGINE_unregister_ciphers(ENGINE *);
void ENGINE_register_all_ciphers(void);
int ENGINE_register_digests(ENGINE *);
void ENGINE_unregister_digests(ENGINE *);
void ENGINE_register_all_digests(void);
int ENGINE_register_complete(ENGINE *);
int ENGINE_register_all_complete(void);
int ENGINE_ctrl(ENGINE *, int, long, void *, void (*)(void));
int ENGINE_cmd_is_executable(ENGINE *, int);
int ENGINE_ctrl_cmd(ENGINE *, const char *, long, void *, void (*)(void), int);
int ENGINE_ctrl_cmd_string(ENGINE *, const char *, const char *, int);
ENGINE *ENGINE_new(void);
int ENGINE_free(ENGINE *);
int ENGINE_up_ref(ENGINE *);
int ENGINE_set_id(ENGINE *, const char *);
int ENGINE_set_name(ENGINE *, const char *);
int ENGINE_set_RSA(ENGINE *, const RSA_METHOD *);
int ENGINE_set_DSA(ENGINE *, const DSA_METHOD *);
int ENGINE_set_DH(ENGINE *, const DH_METHOD *);
int ENGINE_set_RAND(ENGINE *, const RAND_METHOD *);
int ENGINE_set_destroy_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_init_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_finish_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_ctrl_function(ENGINE *, ENGINE_CTRL_FUNC_PTR);
int ENGINE_set_load_privkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR);
int ENGINE_set_load_pubkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR);
int ENGINE_set_ciphers(ENGINE *, ENGINE_CIPHERS_PTR);
int ENGINE_set_digests(ENGINE *, ENGINE_DIGESTS_PTR);
int ENGINE_set_flags(ENGINE *, int);
int ENGINE_set_cmd_defns(ENGINE *, const ENGINE_CMD_DEFN *);
const char *ENGINE_get_id(const ENGINE *);
const char *ENGINE_get_name(const ENGINE *);
const RSA_METHOD *ENGINE_get_RSA(const ENGINE *);
const DSA_METHOD *ENGINE_get_DSA(const ENGINE *);
const DH_METHOD *ENGINE_get_DH(const ENGINE *);
const RAND_METHOD *ENGINE_get_RAND(const ENGINE *);
const EVP_CIPHER *ENGINE_get_cipher(ENGINE *, int);
const EVP_MD *ENGINE_get_digest(ENGINE *, int);
int ENGINE_get_flags(const ENGINE *);
const ENGINE_CMD_DEFN *ENGINE_get_cmd_defns(const ENGINE *);
EVP_PKEY *ENGINE_load_private_key(ENGINE *, const char *, UI_METHOD *, void *);
EVP_PKEY *ENGINE_load_public_key(ENGINE *, const char *, UI_METHOD *, void *);
void ENGINE_add_conf_module(void);
"""

MACROS = """
/* these became macros in 1.1.0 */
void ENGINE_load_openssl(void);
void ENGINE_load_dynamic(void);
void ENGINE_cleanup(void);

void ENGINE_load_cryptodev(void);
"""

CUSTOMIZATIONS = """
#if defined(LIBRESSL_VERSION_NUMBER)
static const long Cryptography_HAS_ENGINE_CRYPTODEV = 0;
void (*ENGINE_load_cryptodev)(void) = NULL;
#else
static const long Cryptography_HAS_ENGINE_CRYPTODEV = 1;
#endif
"""
apache-2.0
-2,814,201,960,246,762,000
35.294521
79
0.713315
false
rhinstaller/anaconda
tests/unit_tests/pyanaconda_tests/modules/payloads/payload/test_module_payload_dnf_utils.py
2
15676
# # Copyright (C) 2020 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # import unittest from textwrap import dedent from unittest.mock import patch, Mock from blivet.size import Size from pyanaconda.core.constants import GROUP_PACKAGE_TYPES_REQUIRED, GROUP_PACKAGE_TYPES_ALL from pyanaconda.modules.common.constants.objects import DEVICE_TREE from pyanaconda.modules.common.constants.services import STORAGE from pyanaconda.modules.common.structures.packages import PackagesSelectionData from pyanaconda.modules.payloads.payload.dnf.dnf_manager import DNFManager from pyanaconda.modules.payloads.payload.dnf.utils import get_kernel_package, \ get_product_release_version, get_installation_specs, get_kernel_version_list, \ pick_download_location, calculate_required_space, get_free_space_map, _pick_mount_points from tests.unit_tests.pyanaconda_tests import patch_dbus_get_proxy_with_cache class DNFUtilsPackagesTestCase(unittest.TestCase): def test_get_kernel_package_excluded(self): """Test the get_kernel_package function with kernel excluded.""" kernel = get_kernel_package(Mock(), exclude_list=["kernel"]) self.assertEqual(kernel, None) def test_get_kernel_package_unavailable(self): """Test the get_kernel_package function with unavailable packages.""" dnf_manager = Mock(spec=DNFManager) dnf_manager.is_package_available.return_value = False with self.assertLogs(level="ERROR") as cm: kernel = get_kernel_package(dnf_manager, exclude_list=[]) msg = "Failed to select a kernel" self.assertIn(msg, "\n".join(cm.output)) self.assertEqual(kernel, None) @patch("pyanaconda.modules.payloads.payload.dnf.utils.is_lpae_available") def test_get_kernel_package_lpae(self, is_lpae): """Test the get_kernel_package function with LPAE.""" is_lpae.return_value = True dnf_manager = Mock(spec=DNFManager) dnf_manager.is_package_available.return_value = True kernel = get_kernel_package(dnf_manager, exclude_list=[]) self.assertEqual(kernel, "kernel-lpae") kernel = get_kernel_package(dnf_manager, exclude_list=["kernel-lpae"]) self.assertEqual(kernel, "kernel") @patch("pyanaconda.modules.payloads.payload.dnf.utils.is_lpae_available") def test_get_kernel_package(self, is_lpae): """Test the get_kernel_package function.""" is_lpae.return_value = False dnf_manager = Mock(spec=DNFManager) dnf_manager.is_package_available.return_value = True kernel = get_kernel_package(dnf_manager, exclude_list=[]) self.assertEqual(kernel, "kernel") @patch("pyanaconda.modules.payloads.payload.dnf.utils.productVersion", "invalid") def test_get_product_release_version_invalid(self): """Test the get_product_release_version function with an invalid value.""" 
self.assertEqual(get_product_release_version(), "rawhide") @patch("pyanaconda.modules.payloads.payload.dnf.utils.productVersion", "28") def test_get_product_release_version_number(self): """Test the get_product_release_version function with a valid number.""" self.assertEqual(get_product_release_version(), "28") @patch("pyanaconda.modules.payloads.payload.dnf.utils.productVersion", "7.4") def test_get_product_release_version_dot(self): """Test the get_product_release_version function with a dot.""" self.assertEqual(get_product_release_version(), "7.4") def test_get_installation_specs_default(self): """Test the get_installation_specs function with defaults.""" data = PackagesSelectionData() self.assertEqual(get_installation_specs(data), (["@core"], [])) def test_get_installation_specs_nocore(self): """Test the get_installation_specs function without core.""" data = PackagesSelectionData() data.core_group_enabled = False self.assertEqual(get_installation_specs(data), ([], ["@core"])) def test_get_installation_specs_environment(self): """Test the get_installation_specs function with environment.""" data = PackagesSelectionData() data.environment = "environment-1" self.assertEqual(get_installation_specs(data), ( ["@environment-1", "@core"], [] )) env = "environment-2" self.assertEqual(get_installation_specs(data, default_environment=env), ( ["@environment-1", "@core"], [] )) data.default_environment_enabled = True self.assertEqual(get_installation_specs(data, default_environment=env), ( ["@environment-2", "@core"], [] )) def test_get_installation_specs_packages(self): """Test the get_installation_specs function with packages.""" data = PackagesSelectionData() data.packages = ["p1", "p2", "p3"] data.excluded_packages = ["p4", "p5", "p6"] self.assertEqual(get_installation_specs(data), ( ["@core", "p1", "p2", "p3"], ["p4", "p5", "p6"] )) def test_get_installation_specs_groups(self): """Test the get_installation_specs function with groups.""" data = PackagesSelectionData() data.groups = ["g1", "g2", "g3"] data.excluded_groups = ["g4", "g5", "g6"] data.groups_package_types = { "g1": GROUP_PACKAGE_TYPES_REQUIRED, "g3": GROUP_PACKAGE_TYPES_ALL, "g4": GROUP_PACKAGE_TYPES_REQUIRED, "g6": GROUP_PACKAGE_TYPES_ALL, } self.assertEqual(get_installation_specs(data), ( [ "@core", "@g1/mandatory,conditional", "@g2", "@g3/mandatory,default,conditional,optional"], [ "@g4", "@g5", "@g6" ] )) @patch("pyanaconda.modules.payloads.payload.dnf.utils.rpm") def test_get_kernel_version_list(self, mock_rpm): """Test the get_kernel_version_list function.""" hdr_1 = Mock(filenames=[ "/boot/vmlinuz-0-rescue-dbe69c1b88f94a67b689e3f44b0550c8" "/boot/vmlinuz-5.8.15-201.fc32.x86_64", "/boot/efi/EFI/default/vmlinuz-6.8.15-201.fc32.x86_64", ]) hdr_2 = Mock(filenames=[ "/boot/vmlinuz-5.8.16-200.fc32.x86_64", "/boot/efi/EFI/default/vmlinuz-7.8.16-200.fc32.x86_64", "/boot/vmlinuz-5.8.18-200.fc32.x86_64" "/boot/efi/EFI/default/vmlinuz-8.8.18-200.fc32.x86_64" ]) ts = Mock() ts.dbMatch.return_value = [hdr_1, hdr_2] mock_rpm.TransactionSet.return_value = ts self.assertEqual(get_kernel_version_list(), [ '5.8.15-201.fc32.x86_64', '5.8.16-200.fc32.x86_64', '6.8.15-201.fc32.x86_64', '7.8.16-200.fc32.x86_64', '8.8.18-200.fc32.x86_64' ]) @patch("pyanaconda.modules.payloads.payload.dnf.utils.execWithCapture") def test_get_free_space(self, exec_mock): """Test the get_free_space function.""" output = """ Mounted on Avail /dev 100 /dev/shm 200 /run 300 / 400 /tmp 500 /boot 600 /home 700 /boot/efi 800 """ exec_mock.return_value = 
dedent(output).strip() self.assertEqual(get_free_space_map(), { '/dev': Size("100 KiB"), '/dev/shm': Size("200 KiB"), '/run': Size("300 KiB"), '/': Size("400 KiB"), '/tmp': Size("500 KiB"), '/boot': Size("600 KiB"), '/home': Size("700 KiB"), '/boot/efi': Size("800 KiB"), }) @patch("os.statvfs") @patch("pyanaconda.modules.payloads.payload.dnf.utils.conf") @patch("pyanaconda.modules.payloads.payload.dnf.utils.execWithCapture") def test_get_free_space_image(self, exec_mock, conf_mock, statvfs_mock): """Test the get_free_space function.""" output = """ Mounted on Avail / 100 /boot 200 """ exec_mock.return_value = dedent(output).strip() conf_mock.target.is_hardware = False statvfs_mock.return_value = Mock(f_frsize=1024, f_bfree=300) self.assertEqual(get_free_space_map(), { '/': Size("100 KiB"), '/boot': Size("200 KiB"), '/var/tmp': Size("300 KiB"), }) def test_pick_mount_points(self): """Test the _pick_mount_points function.""" mount_points = { "/": Size("1 G"), "/home": Size("1 G"), "/var/tmp": Size("1 G"), "/mnt/sysroot": Size("1 G"), "/mnt/sysroot/home": Size("1 G"), "/mnt/sysroot/tmp": Size("1 G"), "/mnt/sysroot/var": Size("1 G"), "/mnt/sysroot/usr": Size("1 G"), } # All mount points are big enough. # Choose all suitable mount points. sufficient = _pick_mount_points( mount_points, download_size=Size("0.5 G"), install_size=Size("0.5 G") ) self.assertEqual(sufficient, { "/var/tmp", "/mnt/sysroot", "/mnt/sysroot/home", "/mnt/sysroot/tmp", "/mnt/sysroot/var" }) # No mount point is big enough for installation. # Choose non-sysroot mount points for download. sufficient = _pick_mount_points( mount_points, download_size=Size("0.5 G"), install_size=Size("1.5 G") ) self.assertEqual(sufficient, { "/var/tmp", }) # No mount point is big enough for installation or download. sufficient = _pick_mount_points( mount_points, download_size=Size("1.5 G"), install_size=Size("1.5 G") ) self.assertEqual(sufficient, set()) @patch("pyanaconda.modules.payloads.payload.dnf.utils.get_free_space_map") def test_pick_download_location(self, free_space_getter): """Test the pick_download_location function.""" download_size = Size(100) installation_size = Size(200) total_size = Size(300) dnf_manager = Mock() dnf_manager.get_download_size.return_value = download_size dnf_manager.get_installation_size.return_value = installation_size # Found mount points for download and install. # Don't use /mnt/sysroot if possible. free_space_getter.return_value = { "/var/tmp": download_size, "/mnt/sysroot": total_size, } path = pick_download_location(dnf_manager) self.assertEqual(path, "/var/tmp/dnf.package.cache") # Found mount points only for download. # Use the biggest mount point. free_space_getter.return_value = { "/mnt/sysroot/tmp": download_size + 1, "/mnt/sysroot/home": download_size, } path = pick_download_location(dnf_manager) self.assertEqual(path, "/mnt/sysroot/tmp/dnf.package.cache") # No mount point to use. # Fail with an exception. free_space_getter.return_value = {} with self.assertRaises(RuntimeError) as cm: pick_download_location(dnf_manager) msg = "Not enough disk space to download the packages; size 100 B." 
self.assertEqual(str(cm.exception), msg) @patch("pyanaconda.modules.payloads.payload.dnf.utils.execWithCapture") @patch_dbus_get_proxy_with_cache def test_get_combined_free_space(self, proxy_getter, exec_mock): """Test the get_free_space function with the combined options.""" output = """ Mounted on Avail / 100 /tmp 200 """ exec_mock.return_value = dedent(output).strip() mount_points = { '/': Size("300 KiB"), '/boot': Size("400 KiB"), } def get_mount_points(): return list(mount_points.keys()) def get_free_space(paths): return sum(map(mount_points.get, paths)) device_tree = STORAGE.get_proxy(DEVICE_TREE) device_tree.GetMountPoints.side_effect = get_mount_points device_tree.GetFileSystemFreeSpace.side_effect = get_free_space self.assertEqual(get_free_space_map(current=True, scheduled=False), { '/': Size("100 KiB"), '/tmp': Size("200 KiB"), }) self.assertEqual(get_free_space_map(current=False, scheduled=True), { '/mnt/sysroot': Size("300 KiB"), '/mnt/sysroot/boot': Size("400 KiB"), }) self.assertEqual(get_free_space_map(current=True, scheduled=True), { '/': Size("100 KiB"), '/tmp': Size("200 KiB"), '/mnt/sysroot': Size("300 KiB"), '/mnt/sysroot/boot': Size("400 KiB"), }) self.assertEqual(get_free_space_map(current=False, scheduled=False), {}) @patch("pyanaconda.modules.payloads.payload.dnf.utils.get_free_space_map") def test_calculate_required_space(self, free_space_getter): """Test the calculate_required_space function.""" download_size = Size(100) installation_size = Size(200) total_size = Size(300) dnf_manager = Mock() dnf_manager.get_download_size.return_value = download_size dnf_manager.get_installation_size.return_value = installation_size # No mount point to use. # The total size is required. free_space_getter.return_value = {} self.assertEqual(calculate_required_space(dnf_manager), total_size) # Found a mount point for download and install. # The total size is required. free_space_getter.return_value = { "/mnt/sysroot/home": total_size } self.assertEqual(calculate_required_space(dnf_manager), total_size) # Found a mount point for download. # The installation size is required. free_space_getter.return_value = { "/var/tmp": download_size } self.assertEqual(calculate_required_space(dnf_manager), installation_size) # The biggest mount point can be used for download and install. # The total size is required. free_space_getter.return_value = { "/var/tmp": download_size, "/mnt/sysroot": total_size } self.assertEqual(calculate_required_space(dnf_manager), total_size)
gpl-2.0
-7,208,815,140,267,659,000
37.610837
92
0.598941
false
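The tests above pin down the behaviour of get_installation_specs; a condensed sketch of the same call, with hypothetical package names, mirroring test_get_installation_specs_packages:

    data = PackagesSelectionData()
    data.packages = ["vim-enhanced"]       # hypothetical package
    data.excluded_packages = ["emacs"]     # hypothetical excluded package
    include, exclude = get_installation_specs(data)
    # per the tests: include == ["@core", "vim-enhanced"], exclude == ["emacs"]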
adfernandes/pcp
src/pcp/pidstat/test/process_stackutil_test.py
6
2213
#!/usr/bin/env pmpython
#
# Copyright (C) 2016 Sitaram Shelke.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#

import mock
import unittest
from pcp_pidstat import ProcessStackUtil


class TestProcessStackUtil(unittest.TestCase):
    def setUp(self):
        self.__metric_repository = mock.Mock()
        self.__metric_repository.current_value = mock.Mock(side_effect=self.metric_repo_current_value_side_effect)

    def metric_repo_current_value_side_effect(self, metric_name,instance):
        if metric_name == 'proc.memory.vmstack' and instance == 1:
            return 136
        if metric_name == 'proc.psinfo.cmd' and instance == 1:
            return "test"
        if metric_name == 'proc.id.uid' and instance == 1:
            return 1
        if metric_name == 'proc.psinfo.pid' and instance == 1:
            return 1

    def test_stack_size(self):
        process_stack_usage = ProcessStackUtil(1,self.__metric_repository)

        stack_size = process_stack_usage.stack_size()

        self.assertEquals(stack_size, 136)

    def test_stack_referenced_size(self):
        self.skipTest(reason="Implement when suitable metric is found")

    def test_pid(self):
        process_stack_usage = ProcessStackUtil(1,self.__metric_repository)

        pid = process_stack_usage.pid()

        self.assertEqual(pid,1)

    def test_process_name(self):
        process_stack_usage = ProcessStackUtil(1,self.__metric_repository)

        name = process_stack_usage.process_name()

        self.assertEqual(name,'test')

    def test_user_id(self):
        process_stack_usage = ProcessStackUtil(1,self.__metric_repository)

        user_id = process_stack_usage.user_id()

        self.assertEqual(user_id,1)


if __name__ == '__main__':
    unittest.main()
lgpl-2.1
4,987,127,336,330,948,000
31.072464
114
0.676005
false
mou4e/zirconium
build/android/adb_reverse_forwarder.py
15
2519
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Command line tool for forwarding ports from a device to the host.

Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""

import optparse
import sys
import time

from pylib import android_commands
from pylib import constants, forwarder
from pylib.device import device_utils
from pylib.utils import run_tests_helper


def main(argv):
  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
                                 'host_port [device_port_2 host_port_2] ...',
                                 description=__doc__)
  parser.add_option('-v',
                    '--verbose',
                    dest='verbose_count',
                    default=0,
                    action='count',
                    help='Verbose level (multiple times for more)')
  parser.add_option('--device',
                    help='Serial number of device we should use.')
  parser.add_option('--debug', action='store_const', const='Debug',
                    dest='build_type', default='Release',
                    help='Use Debug build of host tools instead of Release.')

  options, args = parser.parse_args(argv)
  run_tests_helper.SetLogLevel(options.verbose_count)

  if len(args) < 2 or not len(args) % 2:
    parser.error('Need even number of port pairs')
    sys.exit(1)

  try:
    port_pairs = map(int, args[1:])
    port_pairs = zip(port_pairs[::2], port_pairs[1::2])
  except ValueError:
    parser.error('Bad port number')
    sys.exit(1)

  devices = android_commands.GetAttachedDevices()

  if options.device:
    if options.device not in devices:
      raise Exception('Error: %s not in attached devices %s' % (options.device,
                      ','.join(devices)))
    devices = [options.device]
  else:
    if not devices:
      raise Exception('Error: no connected devices')
    print "No device specified. Defaulting to " + devices[0]

  device = device_utils.DeviceUtils(devices[0])
  constants.SetBuildType(options.build_type)
  try:
    forwarder.Forwarder.Map(port_pairs, device)
    while True:
      time.sleep(60)
  except KeyboardInterrupt:
    sys.exit(0)
  finally:
    forwarder.Forwarder.UnmapAllDevicePorts(device)


if __name__ == '__main__':
  main(sys.argv)
bsd-3-clause
261,904,930,896,026,880
31.294872
80
0.637555
false
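Going by the option parser in the script above, an invocation would look roughly like the following (the serial number and ports are hypothetical); positional arguments are device_port/host_port pairs:

    build/android/adb_reverse_forwarder.py --device 0123456789ABCDEF 8080 8080
    build/android/adb_reverse_forwarder.py -v --debug 9000 8000 9001 8001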
zaqwes8811/micro-apps
extern/gmock-1.6.0/gtest/test/gtest_uninitialized_test.py
2901
2480
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Verifies that Google Test warns the user when not initialized properly."""

__author__ = '[email protected] (Zhanyong Wan)'

import gtest_test_utils


COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')


def Assert(condition):
  if not condition:
    raise AssertionError


def AssertEq(expected, actual):
  if expected != actual:
    print 'Expected: %s' % (expected,)
    print ' Actual: %s' % (actual,)
    raise AssertionError


def TestExitCodeAndOutput(command):
  """Runs the given command and verifies its exit code and output."""

  # Verifies that 'command' exits with code 1.
  p = gtest_test_utils.Subprocess(command)
  Assert(p.exited)
  AssertEq(1, p.exit_code)

  Assert('InitGoogleTest' in p.output)


class GTestUninitializedTest(gtest_test_utils.TestCase):
  def testExitCodeAndOutput(self):
    TestExitCodeAndOutput(COMMAND)


if __name__ == '__main__':
  gtest_test_utils.Main()
mit
-5,457,692,574,667,149,000
34.428571
77
0.755645
false
pim89/youtube-dl
youtube_dl/extractor/brightcove.py
5
27756
# coding: utf-8 from __future__ import unicode_literals import re import json from .common import InfoExtractor from ..compat import ( compat_etree_fromstring, compat_parse_qs, compat_str, compat_urllib_parse_urlparse, compat_urlparse, compat_xml_parse_error, compat_HTTPError, ) from ..utils import ( determine_ext, ExtractorError, find_xpath_attr, fix_xml_ampersands, float_or_none, js_to_json, int_or_none, parse_iso8601, unescapeHTML, unsmuggle_url, update_url_query, clean_html, mimetype2ext, ) class BrightcoveLegacyIE(InfoExtractor): IE_NAME = 'brightcove:legacy' _VALID_URL = r'(?:https?://.*brightcove\.com/(services|viewer).*?\?|brightcove:)(?P<query>.*)' _FEDERATED_URL = 'http://c.brightcove.com/services/viewer/htmlFederated' _TESTS = [ { # From http://www.8tv.cat/8aldia/videos/xavier-sala-i-martin-aquesta-tarda-a-8-al-dia/ 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1654948606001&flashID=myExperience&%40videoPlayer=2371591881001', 'md5': '5423e113865d26e40624dce2e4b45d95', 'note': 'Test Brightcove downloads and detection in GenericIE', 'info_dict': { 'id': '2371591881001', 'ext': 'mp4', 'title': 'Xavier Sala i Martín: “Un banc que no presta és un banc zombi que no serveix per a res”', 'uploader': '8TV', 'description': 'md5:a950cc4285c43e44d763d036710cd9cd', 'timestamp': 1368213670, 'upload_date': '20130510', 'uploader_id': '1589608506001', } }, { # From http://medianetwork.oracle.com/video/player/1785452137001 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1217746023001&flashID=myPlayer&%40videoPlayer=1785452137001', 'info_dict': { 'id': '1785452137001', 'ext': 'flv', 'title': 'JVMLS 2012: Arrays 2.0 - Opportunities and Challenges', 'description': 'John Rose speaks at the JVM Language Summit, August 1, 2012.', 'uploader': 'Oracle', 'timestamp': 1344975024, 'upload_date': '20120814', 'uploader_id': '1460825906', }, }, { # From http://mashable.com/2013/10/26/thermoelectric-bracelet-lets-you-control-your-body-temperature/ 'url': 'http://c.brightcove.com/services/viewer/federated_f9?&playerID=1265504713001&publisherID=AQ%7E%7E%2CAAABBzUwv1E%7E%2CxP-xFHVUstiMFlNYfvF4G9yFnNaqCw_9&videoID=2750934548001', 'info_dict': { 'id': '2750934548001', 'ext': 'mp4', 'title': 'This Bracelet Acts as a Personal Thermostat', 'description': 'md5:547b78c64f4112766ccf4e151c20b6a0', 'uploader': 'Mashable', 'timestamp': 1382041798, 'upload_date': '20131017', 'uploader_id': '1130468786001', }, }, { # test that the default referer works # from http://national.ballet.ca/interact/video/Lost_in_Motion_II/ 'url': 'http://link.brightcove.com/services/player/bcpid756015033001?bckey=AQ~~,AAAApYJi_Ck~,GxhXCegT1Dp39ilhXuxMJxasUhVNZiil&bctid=2878862109001', 'info_dict': { 'id': '2878862109001', 'ext': 'mp4', 'title': 'Lost in Motion II', 'description': 'md5:363109c02998fee92ec02211bd8000df', 'uploader': 'National Ballet of Canada', }, 'skip': 'Video gone', }, { # test flv videos served by akamaihd.net # From http://www.redbull.com/en/bike/stories/1331655643987/replay-uci-dh-world-cup-2014-from-fort-william 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?%40videoPlayer=ref%3Aevent-stream-356&linkBaseURL=http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fvideos%2F1331655630249%2Freplay-uci-fort-william-2014-dh&playerKey=AQ%7E%7E%2CAAAApYJ7UqE%7E%2Cxqr_zXk0I-zzNndy8NlHogrCb5QdyZRf&playerID=1398061561001#__youtubedl_smuggle=%7B%22Referer%22%3A+%22http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fstories%2F1331655643987%2Freplay-uci-dh-world-cup-2014-from-fort-william%22%7D', # 
The md5 checksum changes on each download 'info_dict': { 'id': '3750436379001', 'ext': 'flv', 'title': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals', 'uploader': 'RBTV Old (do not use)', 'description': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals', 'timestamp': 1409122195, 'upload_date': '20140827', 'uploader_id': '710858724001', }, }, { # playlist with 'videoList' # from http://support.brightcove.com/en/video-cloud/docs/playlist-support-single-video-players 'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=3550052898001&playerKey=AQ%7E%7E%2CAAABmA9XpXk%7E%2C-Kp7jNgisre1fG5OdqpAFUTcs0lP_ZoL', 'info_dict': { 'title': 'Sealife', 'id': '3550319591001', }, 'playlist_mincount': 7, }, { # playlist with 'playlistTab' (https://github.com/rg3/youtube-dl/issues/9965) 'url': 'http://c.brightcove.com/services/json/experience/runtime/?command=get_programming_for_experience&playerKey=AQ%7E%7E,AAABXlLMdok%7E,NJ4EoMlZ4rZdx9eU1rkMVd8EaYPBBUlg', 'info_dict': { 'id': '1522758701001', 'title': 'Lesson 08', }, 'playlist_mincount': 10, }, ] FLV_VCODECS = { 1: 'SORENSON', 2: 'ON2', 3: 'H264', 4: 'VP8', } @classmethod def _build_brighcove_url(cls, object_str): """ Build a Brightcove url from a xml string containing <object class="BrightcoveExperience">{params}</object> """ # Fix up some stupid HTML, see https://github.com/rg3/youtube-dl/issues/1553 object_str = re.sub(r'(<param(?:\s+[a-zA-Z0-9_]+="[^"]*")*)>', lambda m: m.group(1) + '/>', object_str) # Fix up some stupid XML, see https://github.com/rg3/youtube-dl/issues/1608 object_str = object_str.replace('<--', '<!--') # remove namespace to simplify extraction object_str = re.sub(r'(<object[^>]*)(xmlns=".*?")', r'\1', object_str) object_str = fix_xml_ampersands(object_str) try: object_doc = compat_etree_fromstring(object_str.encode('utf-8')) except compat_xml_parse_error: return fv_el = find_xpath_attr(object_doc, './param', 'name', 'flashVars') if fv_el is not None: flashvars = dict( (k, v[0]) for k, v in compat_parse_qs(fv_el.attrib['value']).items()) else: flashvars = {} data_url = object_doc.attrib.get('data', '') data_url_params = compat_parse_qs(compat_urllib_parse_urlparse(data_url).query) def find_param(name): if name in flashvars: return flashvars[name] node = find_xpath_attr(object_doc, './param', 'name', name) if node is not None: return node.attrib['value'] return data_url_params.get(name) params = {} playerID = find_param('playerID') if playerID is None: raise ExtractorError('Cannot find player ID') params['playerID'] = playerID playerKey = find_param('playerKey') # Not all pages define this value if playerKey is not None: params['playerKey'] = playerKey # These fields hold the id of the video videoPlayer = find_param('@videoPlayer') or find_param('videoId') or find_param('videoID') or find_param('@videoList') if videoPlayer is not None: params['@videoPlayer'] = videoPlayer linkBase = find_param('linkBaseURL') if linkBase is not None: params['linkBaseURL'] = linkBase return cls._make_brightcove_url(params) @classmethod def _build_brighcove_url_from_js(cls, object_js): # The layout of JS is as follows: # customBC.createVideo = function (width, height, playerID, playerKey, videoPlayer, VideoRandomID) { # // build Brightcove <object /> XML # } m = re.search( r'''(?x)customBC.\createVideo\( .*? 
# skipping width and height ["\'](?P<playerID>\d+)["\']\s*,\s* # playerID ["\'](?P<playerKey>AQ[^"\']{48})[^"\']*["\']\s*,\s* # playerKey begins with AQ and is 50 characters # in length, however it's appended to itself # in places, so truncate ["\'](?P<videoID>\d+)["\'] # @videoPlayer ''', object_js) if m: return cls._make_brightcove_url(m.groupdict()) @classmethod def _make_brightcove_url(cls, params): return update_url_query(cls._FEDERATED_URL, params) @classmethod def _extract_brightcove_url(cls, webpage): """Try to extract the brightcove url from the webpage, returns None if it can't be found """ urls = cls._extract_brightcove_urls(webpage) return urls[0] if urls else None @classmethod def _extract_brightcove_urls(cls, webpage): """Return a list of all Brightcove URLs from the webpage """ url_m = re.search( r'<meta\s+property=[\'"]og:video[\'"]\s+content=[\'"](https?://(?:secure|c)\.brightcove.com/[^\'"]+)[\'"]', webpage) if url_m: url = unescapeHTML(url_m.group(1)) # Some sites don't add it, we can't download with this url, for example: # http://www.ktvu.com/videos/news/raw-video-caltrain-releases-video-of-man-almost/vCTZdY/ if 'playerKey' in url or 'videoId' in url: return [url] matches = re.findall( r'''(?sx)<object (?: [^>]+?class=[\'"][^>]*?BrightcoveExperience.*?[\'"] | [^>]*?>\s*<param\s+name="movie"\s+value="https?://[^/]*brightcove\.com/ ).+?>\s*</object>''', webpage) if matches: return list(filter(None, [cls._build_brighcove_url(m) for m in matches])) return list(filter(None, [ cls._build_brighcove_url_from_js(custom_bc) for custom_bc in re.findall(r'(customBC\.createVideo\(.+?\);)', webpage)])) def _real_extract(self, url): url, smuggled_data = unsmuggle_url(url, {}) # Change the 'videoId' and others field to '@videoPlayer' url = re.sub(r'(?<=[?&])(videoI(d|D)|bctid)', '%40videoPlayer', url) # Change bckey (used by bcove.me urls) to playerKey url = re.sub(r'(?<=[?&])bckey', 'playerKey', url) mobj = re.match(self._VALID_URL, url) query_str = mobj.group('query') query = compat_urlparse.parse_qs(query_str) videoPlayer = query.get('@videoPlayer') if videoPlayer: # We set the original url as the default 'Referer' header referer = smuggled_data.get('Referer', url) return self._get_video_info( videoPlayer[0], query, referer=referer) elif 'playerKey' in query: player_key = query['playerKey'] return self._get_playlist_info(player_key[0]) else: raise ExtractorError( 'Cannot find playerKey= variable. 
Did you forget quotes in a shell invocation?', expected=True) def _get_video_info(self, video_id, query, referer=None): headers = {} linkBase = query.get('linkBaseURL') if linkBase is not None: referer = linkBase[0] if referer is not None: headers['Referer'] = referer webpage = self._download_webpage(self._FEDERATED_URL, video_id, headers=headers, query=query) error_msg = self._html_search_regex( r"<h1>We're sorry.</h1>([\s\n]*<p>.*?</p>)+", webpage, 'error message', default=None) if error_msg is not None: raise ExtractorError( 'brightcove said: %s' % error_msg, expected=True) self.report_extraction(video_id) info = self._search_regex(r'var experienceJSON = ({.*});', webpage, 'json') info = json.loads(info)['data'] video_info = info['programmedContent']['videoPlayer']['mediaDTO'] video_info['_youtubedl_adServerURL'] = info.get('adServerURL') return self._extract_video_info(video_info) def _get_playlist_info(self, player_key): info_url = 'http://c.brightcove.com/services/json/experience/runtime/?command=get_programming_for_experience&playerKey=%s' % player_key playlist_info = self._download_webpage( info_url, player_key, 'Downloading playlist information') json_data = json.loads(playlist_info) if 'videoList' in json_data: playlist_info = json_data['videoList'] playlist_dto = playlist_info['mediaCollectionDTO'] elif 'playlistTabs' in json_data: playlist_info = json_data['playlistTabs'] playlist_dto = playlist_info['lineupListDTO']['playlistDTOs'][0] else: raise ExtractorError('Empty playlist') videos = [self._extract_video_info(video_info) for video_info in playlist_dto['videoDTOs']] return self.playlist_result(videos, playlist_id='%s' % playlist_info['id'], playlist_title=playlist_dto['displayName']) def _extract_video_info(self, video_info): video_id = compat_str(video_info['id']) publisher_id = video_info.get('publisherId') info = { 'id': video_id, 'title': video_info['displayName'].strip(), 'description': video_info.get('shortDescription'), 'thumbnail': video_info.get('videoStillURL') or video_info.get('thumbnailURL'), 'uploader': video_info.get('publisherName'), 'uploader_id': compat_str(publisher_id) if publisher_id else None, 'duration': float_or_none(video_info.get('length'), 1000), 'timestamp': int_or_none(video_info.get('creationDate'), 1000), } renditions = video_info.get('renditions', []) + video_info.get('IOSRenditions', []) if renditions: formats = [] for rend in renditions: url = rend['defaultURL'] if not url: continue ext = None if rend['remote']: url_comp = compat_urllib_parse_urlparse(url) if url_comp.path.endswith('.m3u8'): formats.extend( self._extract_m3u8_formats( url, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)) continue elif 'akamaihd.net' in url_comp.netloc: # This type of renditions are served through # akamaihd.net, but they don't use f4m manifests url = url.replace('control/', '') + '?&v=3.3.0&fp=13&r=FEEFJ&g=RTSJIMBMPFPB' ext = 'flv' if ext is None: ext = determine_ext(url) tbr = int_or_none(rend.get('encodingRate'), 1000) a_format = { 'format_id': 'http%s' % ('-%s' % tbr if tbr else ''), 'url': url, 'ext': ext, 'filesize': int_or_none(rend.get('size')) or None, 'tbr': tbr, } if rend.get('audioOnly'): a_format.update({ 'vcodec': 'none', }) else: a_format.update({ 'height': int_or_none(rend.get('frameHeight')), 'width': int_or_none(rend.get('frameWidth')), 'vcodec': rend.get('videoCodec'), }) # m3u8 manifests with remote == false are media playlists # Not calling _extract_m3u8_formats here to save network traffic if ext == 'm3u8': a_format.update({ 
'format_id': 'hls%s' % ('-%s' % tbr if tbr else ''), 'ext': 'mp4', 'protocol': 'm3u8_native', }) formats.append(a_format) self._sort_formats(formats) info['formats'] = formats elif video_info.get('FLVFullLengthURL') is not None: info.update({ 'url': video_info['FLVFullLengthURL'], 'vcodec': self.FLV_VCODECS.get(video_info.get('FLVFullCodec')), 'filesize': int_or_none(video_info.get('FLVFullSize')), }) if self._downloader.params.get('include_ads', False): adServerURL = video_info.get('_youtubedl_adServerURL') if adServerURL: ad_info = { '_type': 'url', 'url': adServerURL, } if 'url' in info: return { '_type': 'playlist', 'title': info['title'], 'entries': [ad_info, info], } else: return ad_info if 'url' not in info and not info.get('formats'): raise ExtractorError('Unable to extract video url for %s' % video_id) return info class BrightcoveNewIE(InfoExtractor): IE_NAME = 'brightcove:new' _VALID_URL = r'https?://players\.brightcove\.net/(?P<account_id>\d+)/(?P<player_id>[^/]+)_(?P<embed>[^/]+)/index\.html\?.*videoId=(?P<video_id>\d+|ref:[^&]+)' _TESTS = [{ 'url': 'http://players.brightcove.net/929656772001/e41d32dc-ec74-459e-a845-6c69f7b724ea_default/index.html?videoId=4463358922001', 'md5': 'c8100925723840d4b0d243f7025703be', 'info_dict': { 'id': '4463358922001', 'ext': 'mp4', 'title': 'Meet the man behind Popcorn Time', 'description': 'md5:eac376a4fe366edc70279bfb681aea16', 'duration': 165.768, 'timestamp': 1441391203, 'upload_date': '20150904', 'uploader_id': '929656772001', 'formats': 'mincount:22', }, }, { # with rtmp streams 'url': 'http://players.brightcove.net/4036320279001/5d112ed9-283f-485f-a7f9-33f42e8bc042_default/index.html?videoId=4279049078001', 'info_dict': { 'id': '4279049078001', 'ext': 'mp4', 'title': 'Titansgrave: Chapter 0', 'description': 'Titansgrave: Chapter 0', 'duration': 1242.058, 'timestamp': 1433556729, 'upload_date': '20150606', 'uploader_id': '4036320279001', 'formats': 'mincount:41', }, 'params': { # m3u8 download 'skip_download': True, } }, { # ref: prefixed video id 'url': 'http://players.brightcove.net/3910869709001/21519b5c-4b3b-4363-accb-bdc8f358f823_default/index.html?videoId=ref:7069442', 'only_matching': True, }, { # non numeric ref: prefixed video id 'url': 'http://players.brightcove.net/710858724001/default_default/index.html?videoId=ref:event-stream-356', 'only_matching': True, }, { # unavailable video without message but with error_code 'url': 'http://players.brightcove.net/1305187701/c832abfb-641b-44eb-9da0-2fe76786505f_default/index.html?videoId=4377407326001', 'only_matching': True, }] @staticmethod def _extract_url(webpage): urls = BrightcoveNewIE._extract_urls(webpage) return urls[0] if urls else None @staticmethod def _extract_urls(webpage): # Reference: # 1. http://docs.brightcove.com/en/video-cloud/brightcove-player/guides/publish-video.html#setvideoiniframe # 2. http://docs.brightcove.com/en/video-cloud/brightcove-player/guides/publish-video.html#setvideousingjavascript # 3. http://docs.brightcove.com/en/video-cloud/brightcove-player/guides/embed-in-page.html # 4. 
https://support.brightcove.com/en/video-cloud/docs/dynamically-assigning-videos-player entries = [] # Look for iframe embeds [1] for _, url in re.findall( r'<iframe[^>]+src=(["\'])((?:https?:)?//players\.brightcove\.net/\d+/[^/]+/index\.html.+?)\1', webpage): entries.append(url if url.startswith('http') else 'http:' + url) # Look for embed_in_page embeds [2] for video_id, account_id, player_id, embed in re.findall( # According to examples from [3] it's unclear whether video id # may be optional and what to do when it is # According to [4] data-video-id may be prefixed with ref: r'''(?sx) <video[^>]+ data-video-id=["\'](\d+|ref:[^"\']+)["\'][^>]*>.*? </video>.*? <script[^>]+ src=["\'](?:https?:)?//players\.brightcove\.net/ (\d+)/([^/]+)_([^/]+)/index(?:\.min)?\.js ''', webpage): entries.append( 'http://players.brightcove.net/%s/%s_%s/index.html?videoId=%s' % (account_id, player_id, embed, video_id)) return entries def _real_extract(self, url): account_id, player_id, embed, video_id = re.match(self._VALID_URL, url).groups() webpage = self._download_webpage( 'http://players.brightcove.net/%s/%s_%s/index.min.js' % (account_id, player_id, embed), video_id) policy_key = None catalog = self._search_regex( r'catalog\(({.+?})\);', webpage, 'catalog', default=None) if catalog: catalog = self._parse_json( js_to_json(catalog), video_id, fatal=False) if catalog: policy_key = catalog.get('policyKey') if not policy_key: policy_key = self._search_regex( r'policyKey\s*:\s*(["\'])(?P<pk>.+?)\1', webpage, 'policy key', group='pk') api_url = 'https://edge.api.brightcove.com/playback/v1/accounts/%s/videos/%s' % (account_id, video_id) try: json_data = self._download_json(api_url, video_id, headers={ 'Accept': 'application/json;pk=%s' % policy_key }) except ExtractorError as e: if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403: json_data = self._parse_json(e.cause.read().decode(), video_id)[0] raise ExtractorError( json_data.get('message') or json_data['error_code'], expected=True) raise title = json_data['name'].strip() formats = [] for source in json_data.get('sources', []): container = source.get('container') ext = mimetype2ext(source.get('type')) src = source.get('src') if ext == 'ism': continue elif ext == 'm3u8' or container == 'M2TS': if not src: continue formats.extend(self._extract_m3u8_formats( src, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)) elif ext == 'mpd': if not src: continue formats.extend(self._extract_mpd_formats(src, video_id, 'dash', fatal=False)) else: streaming_src = source.get('streaming_src') stream_name, app_name = source.get('stream_name'), source.get('app_name') if not src and not streaming_src and (not stream_name or not app_name): continue tbr = float_or_none(source.get('avg_bitrate'), 1000) height = int_or_none(source.get('height')) width = int_or_none(source.get('width')) f = { 'tbr': tbr, 'filesize': int_or_none(source.get('size')), 'container': container, 'ext': ext or container.lower(), } if width == 0 and height == 0: f.update({ 'vcodec': 'none', }) else: f.update({ 'width': width, 'height': height, 'vcodec': source.get('codec'), }) def build_format_id(kind): format_id = kind if tbr: format_id += '-%dk' % int(tbr) if height: format_id += '-%dp' % height return format_id if src or streaming_src: f.update({ 'url': src or streaming_src, 'format_id': build_format_id('http' if src else 'http-streaming'), 'source_preference': 0 if src else -1, }) else: f.update({ 'url': app_name, 'play_path': stream_name, 'format_id': build_format_id('rtmp'), }) 
formats.append(f) errors = json_data.get('errors') if not formats and errors: error = errors[0] raise ExtractorError( error.get('message') or error.get('error_subcode') or error['error_code'], expected=True) self._sort_formats(formats) subtitles = {} for text_track in json_data.get('text_tracks', []): if text_track.get('src'): subtitles.setdefault(text_track.get('srclang'), []).append({ 'url': text_track['src'], }) is_live = False duration = float_or_none(json_data.get('duration'), 1000) if duration and duration < 0: is_live = True return { 'id': video_id, 'title': self._live_title(title) if is_live else title, 'description': clean_html(json_data.get('description')), 'thumbnail': json_data.get('thumbnail') or json_data.get('poster'), 'duration': duration, 'timestamp': parse_iso8601(json_data.get('published_at')), 'uploader_id': account_id, 'formats': formats, 'subtitles': subtitles, 'tags': json_data.get('tags', []), 'is_live': is_live, }
unlicense
3,632,902,321,030,330,000
42.291732
484
0.52436
false
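The Brightcove extractor record above composes its HTTP/HLS/RTMP format identifiers with a small build_format_id helper. A minimal standalone sketch of that naming scheme follows; here tbr and height are passed as parameters rather than captured from an enclosing scope, and the sample values are illustrative, not taken from the record.

# Sketch of the format-id naming used by the extractor above.
# The sample tbr/height values are hypothetical.
def build_format_id(kind, tbr=None, height=None):
    format_id = kind
    if tbr:
        format_id += '-%dk' % int(tbr)
    if height:
        format_id += '-%dp' % height
    return format_id

print(build_format_id('http', tbr=1264.0, height=720))  # http-1264k-720p
print(build_format_id('rtmp'))                          # rtmp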
BowdoinOrient/bongo
bongo/apps/bongo/tests/model_tests.py
1
13646
from bongo.apps.bongo.tests import factories from django.test import TestCase from django.contrib.auth.models import User from django.conf import settings from django.utils.text import slugify import os """ Test content type models and related: test, video, PDF, photo, HTML, pullquote, post """ class TextTestCase(TestCase): def test_creator(self): text = factories.TextFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() text.creators.add(creator1) text.creators.add(creator2) text.save() for creator in text.creators.all(): self.assertIn(text, creator.works()) for creator in [creator1, creator2]: self.assertIn(text, creator.works()) text.delete() creator1.delete() creator2.delete() def test_fields(self): text = factories.TextFactory.create() self.assertIsNotNone(text.caption) def test_excerpt(self): text = factories.TextFactory.build() self.assertEquals(text.excerpt, "The excerpt isn't correct until it's saved") text.body = ( "Quinoa hashtag Kickstarter bespoke. Schlitz PBR&B 3 wolf moon, photo booth swag occupy banh mi PBR " + "artisan lo-fi nor.bongo. Lomo selvage leggings quinoa, ugh cliche cornhole asymmetrical gluten-free " + "Echo Park. Tumblr put a bird on it drinking vinegar sriracha, leggings mumbl.bongo actually four " + "loko twee fixie mustache. Mustache drinking vinegar cliche, meggings before they sold out fap " + "Kickstarter tofu banjo master cleanse ennui fingerstache kogi you probably haven't heard of them. " + "Polaroid photo booth chia biodiesel trust fund typewriter locavore, Blue Bottle 90's Neutra umami " + "flannel. Portland Helvetica umami freegan locavore direct trade, polaroid 3 wolf moon actually." ) text.save() self.assertEquals(text.excerpt, ( "Quinoa hashtag Kickstarter bespoke. Schlitz PBR&B 3 wolf moon, photo booth swag occupy banh mi PBR " + "artisan lo-fi nor.bongo. Lomo selvage leggings quinoa, ugh cliche cornhole asymmetrical gluten-free " + "Echo Park." 
)) text.delete() class VideoTestCase(TestCase): def test_creator(self): video = factories.VideoFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() video.creators.add(creator1) video.creators.add(creator2) video.save() for creator in video.creators.all(): self.assertIn(video, creator.works()) for creator in [creator1, creator2]: self.assertIn(video, creator.works()) video.delete() creator1.delete() creator2.delete() def test_fields(self): video = factories.VideoFactory.build() self.assertIsNotNone(video.caption) self.assertIsNotNone(video.url()) class PDFTestCase(TestCase): def test_creator(self): pdf = factories.PDFFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() pdf.creators.add(creator1) pdf.creators.add(creator2) pdf.save() for creator in pdf.creators.all(): self.assertIn(pdf, creator.works()) for creator in [creator1, creator2]: self.assertIn(pdf, creator.works()) pdf.delete() creator1.delete() creator2.delete() def test_fields(self): pdf = factories.PDFFactory.create() self.assertIsNotNone(pdf.caption) # @todo: test staticfile class PhotoTestCase(TestCase): def test_creator(self): photo = factories.PhotoFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() photo.creators.add(creator1) photo.creators.add(creator2) photo.save() for creator in photo.creators.all(): self.assertIn(photo, creator.works()) for creator in [creator1, creator2]: self.assertIn(photo, creator.works()) photo.delete() creator1.delete() creator2.delete() def test_fields(self): photo = factories.PhotoFactory.create() self.assertIsNotNone(photo.caption) # @todo: test staticfile class HTMLTestCase(TestCase): def test_creator(self): html = factories.HTMLFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() html.creators.add(creator1) html.creators.add(creator2) html.save() for creator in html.creators.all(): self.assertIn(html, creator.works()) for creator in [creator1, creator2]: self.assertIn(html, creator.works()) html.delete() creator1.delete() creator2.delete() def test_fields(self): html = factories.HTMLFactory.create() self.assertIsNotNone(html.caption) self.assertIsNotNone(html.content) class PullquoteTestCase(TestCase): def test_creator(self): pullquote = factories.PullquoteFactory.create() creator1 = factories.CreatorFactory.create() creator2 = factories.CreatorFactory.create() pullquote.creators.add(creator1) pullquote.creators.add(creator2) pullquote.save() for creator in pullquote.creators.all(): self.assertIn(pullquote, creator.works()) for creator in [creator1, creator2]: self.assertIn(pullquote, creator.works()) pullquote.delete() creator1.delete() creator2.delete() def test_fields(self): pullquote = factories.PullquoteFactory.create() self.assertIsNotNone(pullquote.caption) self.assertIsNotNone(pullquote.quote) self.assertIsNotNone(pullquote.attribution) class PostTestCase(TestCase): def test_similar_tags(self): # this is a damn good article. one of the best. 
with open( os.path.normpath( os.path.join( settings.SITE_ROOT, "bongo/apps/bongo/tests/naked.txt" ) ), "r" ) as f_txt: articlebody = f_txt.read() post = factories.PostFactory.create() similar_post = factories.PostFactory.create() text = factories.TextFactory.create(body=articlebody) post.text.add(text) post.save() similar_post.text.add(text) similar_post.save() post.taggit() similar_post.taggit() self.assertNotEqual(post.tags.all().count(), 0) self.assertNotEqual(similar_post.tags.all().count(), 0) self.assertEqual(post.similar_tags()[0], similar_post) def test_popularity(self): post1 = factories.PostFactory.create() post1.views_global = 1 post2 = factories.PostFactory.create() post2.views_global = 2 post3 = factories.PostFactory.create() post3.views_global = 3 self.assertGreater(post2.popularity(), post1.popularity()) self.assertGreater(post3.popularity(), post2.popularity()) def test_primary_section(self): """Test that this convenience method works, which, duh""" post = factories.PostFactory.create() self.assertEqual(post.primary_section(), post.section.classname()) def test_creators(self): """Test the creators() method for finding the authors of post's content""" post = factories.PostFactory.create() text = factories.TextFactory.create() author = factories.CreatorFactory.create() text.creators.add(author) post.text.add(text) text2 = factories.TextFactory.create() text2.creators.add(author) post.text.add(text2) creators = list(post.creators()) self.assertIn(author, creators) self.assertEqual(len(creators), 1) def test_slug(self): """Test that an article gets assigned a slug when saved""" post = factories.PostFactory.create() post.save() self.assertEqual(post.slug, slugify(post.title)) def test_slug_collision(self): posts = [factories.PostFactory.create() for x in range(3)] for post in posts: post.slug = None post.title = "Campus Concern Raises Concern" post.save() self.assertEqual(posts[0].slug, "campus-concern-raises-concern") self.assertEqual(posts[1].slug, "campus-concern-raises-concern-2") self.assertEqual(posts[2].slug, "campus-concern-raises-concern-3") """ Test user-related models: creators, users, jobs """ class UserTestCase(TestCase): def test_password(self): """ Test that a user gets a password, and it works to log them in """ user = factories.UserFactory.create() self.assertNotEqual(user.password, u'') self.assertEqual(user.check_password("defaultpassword"), True) class CreatorTestCase(TestCase): def test_foreign_key(self): """ Test that Creators are properly hooked up to Jobs and Users """ user = factories.UserFactory.create() creator = factories.CreatorFactory.create() job = factories.JobFactory.create() creator.user = user creator.job = job creator.save() self.assertEquals(type(creator.user), User) from bongo.apps.bongo.models import Job self.assertEquals(type(creator.job), Job) creator.delete() def test_works(self): """ Test the connection between a creator and the content they've made """ me = factories.CreatorFactory.create() photo = factories.PhotoFactory.create() photo.creators.add(me) photo.save() video = factories.VideoFactory.create() video.creators.add(me) video.save() self.assertIn(photo, me.works()) self.assertIn(video, me.works()) me.delete() photo.delete() video.delete() def test_primary_section(self): """Test that Creators' primary_section method works""" creator = factories.CreatorFactory.create() section1 = factories.SectionFactory.create() section2 = factories.SectionFactory.create() post1 = factories.PostFactory.create() post1text = 
factories.TextFactory.create() post1text.creators.add(creator) post1.text.add(post1text) post1.section = section1 post1.save() post2 = factories.PostFactory.create() post2text = factories.TextFactory.create() post2text.creators.add(creator) post2.text.add(post2text) post2.section = section2 post2.save() post3 = factories.PostFactory.create() post3text = factories.TextFactory.create() post3text.creators.add(creator) post3.text.add(post3text) post3.section = section2 post3.save() self.assertEqual(creator.primary_section(), section2.classname()) class JobTestCase(TestCase): def test_foreign_key(self): job = factories.JobFactory.create() creator = factories.CreatorFactory.create() creator.job = job creator.save() self.assertEqual(job, creator.job) self.assertIn(creator, job.workers()) job.delete() creator.delete() """ Test metadata models: series, volumes, issues, sections, tags """ class SeriesTestCase(TestCase): def test_m2m(self): # @TODO pass def test_primary_section(self): """Test that Series' primary_section method works""" series = factories.SeriesFactory.create() section1 = factories.SectionFactory.create() section2 = factories.SectionFactory.create() post1 = factories.PostFactory.create() post1.section = section1 post1.series.add(series) post1.save() post2 = factories.PostFactory.create() post2.section = section2 post2.series.add(series) post2.save() post3 = factories.PostFactory.create() post3.section = section2 post3.series.add(series) post3.save() self.assertEqual(series.primary_section(), section2.classname()) class VolumeTestCase(TestCase): def test_foreign_key(self): # @TODO pass class IssueTestCase(TestCase): def test_foreign_key(self): # @TODO pass def test_custom_save(self): issue = factories.IssueFactory.create( volume = factories.VolumeFactory.create() ) self.assertEqual(issue.scribd, None) self.assertEqual(issue.scribd_image, None) issue.scribd = 99999999 issue.save() self.assertEqual(issue.scribd_image, None) issue.scribd = 201901393 issue.save() self.assertEqual(issue.scribd_image[:8], "https://") class SectionTestCase(TestCase): def test_foreign_key(self): # @TODO pass def test_shortname(self): section = factories.SectionFactory.create() self.assertLess(len(section.classname()), 9) self.assertEqual(section.classname(), section.classname().lower()) class TagTestCase(TestCase): def test_foreign_key(self): # @TODO pass def test_autogen(self): # @TODO pass
mit
8,609,577,131,761,195,000
28.473002
116
0.630808
false
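The bongo test record above checks slug collision handling: posts saved with the same title are expected to get -2, -3 suffixes. A minimal sketch of that de-duplication behaviour, assuming Django's slugify is available; the helper name and the in-memory set of taken slugs are illustrative and not part of the test suite.

from django.utils.text import slugify

def unique_slug(title, existing):
    # Append -2, -3, ... until the slug no longer collides.
    base = slugify(title)
    slug, n = base, 2
    while slug in existing:
        slug = '%s-%d' % (base, n)
        n += 1
    existing.add(slug)
    return slug

taken = set()
print(unique_slug('Campus Concern Raises Concern', taken))  # campus-concern-raises-concern
print(unique_slug('Campus Concern Raises Concern', taken))  # campus-concern-raises-concern-2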
viridia/coda
third-party/python/ply-3.4/ply/cpp.py
192
33040
# ----------------------------------------------------------------------------- # cpp.py # # Author: David Beazley (http://www.dabeaz.com) # Copyright (C) 2007 # All rights reserved # # This module implements an ANSI-C style lexical preprocessor for PLY. # ----------------------------------------------------------------------------- from __future__ import generators # ----------------------------------------------------------------------------- # Default preprocessor lexer definitions. These tokens are enough to get # a basic preprocessor working. Other modules may import these if they want # ----------------------------------------------------------------------------- tokens = ( 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND','CPP_DPOUND' ) literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\"" # Whitespace def t_CPP_WS(t): r'\s+' t.lexer.lineno += t.value.count("\n") return t t_CPP_POUND = r'\#' t_CPP_DPOUND = r'\#\#' # Identifier t_CPP_ID = r'[A-Za-z_][\w_]*' # Integer literal def CPP_INTEGER(t): r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)' return t t_CPP_INTEGER = CPP_INTEGER # Floating literal t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' # String literal def t_CPP_STRING(t): r'\"([^\\\n]|(\\(.|\n)))*?\"' t.lexer.lineno += t.value.count("\n") return t # Character constant 'c' or L'c' def t_CPP_CHAR(t): r'(L)?\'([^\\\n]|(\\(.|\n)))*?\'' t.lexer.lineno += t.value.count("\n") return t # Comment def t_CPP_COMMENT(t): r'(/\*(.|\n)*?\*/)|(//.*?\n)' t.lexer.lineno += t.value.count("\n") return t def t_error(t): t.type = t.value[0] t.value = t.value[0] t.lexer.skip(1) return t import re import copy import time import os.path # ----------------------------------------------------------------------------- # trigraph() # # Given an input string, this function replaces all trigraph sequences. # The following mapping is used: # # ??= # # ??/ \ # ??' ^ # ??( [ # ??) ] # ??! | # ??< { # ??> } # ??- ~ # ----------------------------------------------------------------------------- _trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') _trigraph_rep = { '=':'#', '/':'\\', "'":'^', '(':'[', ')':']', '!':'|', '<':'{', '>':'}', '-':'~' } def trigraph(input): return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) # ------------------------------------------------------------------ # Macro object # # This object holds information about preprocessor macros # # .name - Macro name (string) # .value - Macro value (a list of tokens) # .arglist - List of argument names # .variadic - Boolean indicating whether or not variadic macro # .vararg - Name of the variadic parameter # # When a macro is created, the macro replacement token sequence is # pre-scanned and used to create patch lists that are later used # during macro expansion # ------------------------------------------------------------------ class Macro(object): def __init__(self,name,value,arglist=None,variadic=False): self.name = name self.value = value self.arglist = arglist self.variadic = variadic if variadic: self.vararg = arglist[-1] self.source = None # ------------------------------------------------------------------ # Preprocessor object # # Object representing a preprocessor. 
Contains macro definitions, # include directories, and other information # ------------------------------------------------------------------ class Preprocessor(object): def __init__(self,lexer=None): if lexer is None: lexer = lex.lexer self.lexer = lexer self.macros = { } self.path = [] self.temp_path = [] # Probe the lexer for selected tokens self.lexprobe() tm = time.localtime() self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) self.parser = None # ----------------------------------------------------------------------------- # tokenize() # # Utility function. Given a string of text, tokenize into a list of tokens # ----------------------------------------------------------------------------- def tokenize(self,text): tokens = [] self.lexer.input(text) while True: tok = self.lexer.token() if not tok: break tokens.append(tok) return tokens # --------------------------------------------------------------------- # error() # # Report a preprocessor error/warning of some kind # ---------------------------------------------------------------------- def error(self,file,line,msg): print("%s:%d %s" % (file,line,msg)) # ---------------------------------------------------------------------- # lexprobe() # # This method probes the preprocessor lexer object to discover # the token types of symbols that are important to the preprocessor. # If this works right, the preprocessor will simply "work" # with any suitable lexer regardless of how tokens have been named. # ---------------------------------------------------------------------- def lexprobe(self): # Determine the token type for identifiers self.lexer.input("identifier") tok = self.lexer.token() if not tok or tok.value != "identifier": print("Couldn't determine identifier type") else: self.t_ID = tok.type # Determine the token type for integers self.lexer.input("12345") tok = self.lexer.token() if not tok or int(tok.value) != 12345: print("Couldn't determine integer type") else: self.t_INTEGER = tok.type self.t_INTEGER_TYPE = type(tok.value) # Determine the token type for strings enclosed in double quotes self.lexer.input("\"filename\"") tok = self.lexer.token() if not tok or tok.value != "\"filename\"": print("Couldn't determine string type") else: self.t_STRING = tok.type # Determine the token type for whitespace--if any self.lexer.input(" ") tok = self.lexer.token() if not tok or tok.value != " ": self.t_SPACE = None else: self.t_SPACE = tok.type # Determine the token type for newlines self.lexer.input("\n") tok = self.lexer.token() if not tok or tok.value != "\n": self.t_NEWLINE = None print("Couldn't determine token for newlines") else: self.t_NEWLINE = tok.type self.t_WS = (self.t_SPACE, self.t_NEWLINE) # Check for other characters used by the preprocessor chars = [ '<','>','#','##','\\','(',')',',','.'] for c in chars: self.lexer.input(c) tok = self.lexer.token() if not tok or tok.value != c: print("Unable to lex '%s' required for preprocessor" % c) # ---------------------------------------------------------------------- # add_path() # # Adds a search path to the preprocessor. # ---------------------------------------------------------------------- def add_path(self,path): self.path.append(path) # ---------------------------------------------------------------------- # group_lines() # # Given an input string, this function splits it into lines. Trailing whitespace # is removed. Any line ending with \ is grouped with the next line. 
This # function forms the lowest level of the preprocessor---grouping into text into # a line-by-line format. # ---------------------------------------------------------------------- def group_lines(self,input): lex = self.lexer.clone() lines = [x.rstrip() for x in input.splitlines()] for i in xrange(len(lines)): j = i+1 while lines[i].endswith('\\') and (j < len(lines)): lines[i] = lines[i][:-1]+lines[j] lines[j] = "" j += 1 input = "\n".join(lines) lex.input(input) lex.lineno = 1 current_line = [] while True: tok = lex.token() if not tok: break current_line.append(tok) if tok.type in self.t_WS and '\n' in tok.value: yield current_line current_line = [] if current_line: yield current_line # ---------------------------------------------------------------------- # tokenstrip() # # Remove leading/trailing whitespace tokens from a token list # ---------------------------------------------------------------------- def tokenstrip(self,tokens): i = 0 while i < len(tokens) and tokens[i].type in self.t_WS: i += 1 del tokens[:i] i = len(tokens)-1 while i >= 0 and tokens[i].type in self.t_WS: i -= 1 del tokens[i+1:] return tokens # ---------------------------------------------------------------------- # collect_args() # # Collects comma separated arguments from a list of tokens. The arguments # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions) # where tokencount is the number of tokens consumed, args is a list of arguments, # and positions is a list of integers containing the starting index of each # argument. Each argument is represented by a list of tokens. # # When collecting arguments, leading and trailing whitespace is removed # from each argument. # # This function properly handles nested parenthesis and commas---these do not # define new arguments. # ---------------------------------------------------------------------- def collect_args(self,tokenlist): args = [] positions = [] current_arg = [] nesting = 1 tokenlen = len(tokenlist) # Search for the opening '('. 
i = 0 while (i < tokenlen) and (tokenlist[i].type in self.t_WS): i += 1 if (i < tokenlen) and (tokenlist[i].value == '('): positions.append(i+1) else: self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") return 0, [], [] i += 1 while i < tokenlen: t = tokenlist[i] if t.value == '(': current_arg.append(t) nesting += 1 elif t.value == ')': nesting -= 1 if nesting == 0: if current_arg: args.append(self.tokenstrip(current_arg)) positions.append(i) return i+1,args,positions current_arg.append(t) elif t.value == ',' and nesting == 1: args.append(self.tokenstrip(current_arg)) positions.append(i+1) current_arg = [] else: current_arg.append(t) i += 1 # Missing end argument self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") return 0, [],[] # ---------------------------------------------------------------------- # macro_prescan() # # Examine the macro value (token sequence) and identify patch points # This is used to speed up macro expansion later on---we'll know # right away where to apply patches to the value to form the expansion # ---------------------------------------------------------------------- def macro_prescan(self,macro): macro.patch = [] # Standard macro arguments macro.str_patch = [] # String conversion expansion macro.var_comma_patch = [] # Variadic macro comma patch i = 0 while i < len(macro.value): if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: argnum = macro.arglist.index(macro.value[i].value) # Conversion of argument to a string if i > 0 and macro.value[i-1].value == '#': macro.value[i] = copy.copy(macro.value[i]) macro.value[i].type = self.t_STRING del macro.value[i-1] macro.str_patch.append((argnum,i-1)) continue # Concatenation elif (i > 0 and macro.value[i-1].value == '##'): macro.patch.append(('c',argnum,i-1)) del macro.value[i-1] continue elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): macro.patch.append(('c',argnum,i)) i += 1 continue # Standard expansion else: macro.patch.append(('e',argnum,i)) elif macro.value[i].value == '##': if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ (macro.value[i+1].value == macro.vararg): macro.var_comma_patch.append(i-1) i += 1 macro.patch.sort(key=lambda x: x[2],reverse=True) # ---------------------------------------------------------------------- # macro_expand_args() # # Given a Macro and list of arguments (each a token list), this method # returns an expanded version of a macro. The return value is a token sequence # representing the replacement macro tokens # ---------------------------------------------------------------------- def macro_expand_args(self,macro,args): # Make a copy of the macro token sequence rep = [copy.copy(_x) for _x in macro.value] # Make string expansion patches. These do not alter the length of the replacement sequence str_expansion = {} for argnum, i in macro.str_patch: if argnum not in str_expansion: str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") rep[i] = copy.copy(rep[i]) rep[i].value = str_expansion[argnum] # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid comma_patch = False if macro.variadic and not args[-1]: for i in macro.var_comma_patch: rep[i] = None comma_patch = True # Make all other patches. The order of these matters. 
It is assumed that the patch list # has been sorted in reverse order of patch location since replacements will cause the # size of the replacement sequence to expand from the patch point. expanded = { } for ptype, argnum, i in macro.patch: # Concatenation. Argument is left unexpanded if ptype == 'c': rep[i:i+1] = args[argnum] # Normal expansion. Argument is macro expanded first elif ptype == 'e': if argnum not in expanded: expanded[argnum] = self.expand_macros(args[argnum]) rep[i:i+1] = expanded[argnum] # Get rid of removed comma if necessary if comma_patch: rep = [_i for _i in rep if _i] return rep # ---------------------------------------------------------------------- # expand_macros() # # Given a list of tokens, this function performs macro expansion. # The expanded argument is a dictionary that contains macros already # expanded. This is used to prevent infinite recursion. # ---------------------------------------------------------------------- def expand_macros(self,tokens,expanded=None): if expanded is None: expanded = {} i = 0 while i < len(tokens): t = tokens[i] if t.type == self.t_ID: if t.value in self.macros and t.value not in expanded: # Yes, we found a macro match expanded[t.value] = True m = self.macros[t.value] if not m.arglist: # A simple macro ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) for e in ex: e.lineno = t.lineno tokens[i:i+1] = ex i += len(ex) else: # A macro with arguments j = i + 1 while j < len(tokens) and tokens[j].type in self.t_WS: j += 1 if tokens[j].value == '(': tokcount,args,positions = self.collect_args(tokens[j:]) if not m.variadic and len(args) != len(m.arglist): self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) i = j + tokcount elif m.variadic and len(args) < len(m.arglist)-1: if len(m.arglist) > 2: self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) else: self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) i = j + tokcount else: if m.variadic: if len(args) == len(m.arglist)-1: args.append([]) else: args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] del args[len(m.arglist):] # Get macro replacement text rep = self.macro_expand_args(m,args) rep = self.expand_macros(rep,expanded) for r in rep: r.lineno = t.lineno tokens[i:j+tokcount] = rep i += len(rep) del expanded[t.value] continue elif t.value == '__LINE__': t.type = self.t_INTEGER t.value = self.t_INTEGER_TYPE(t.lineno) i += 1 return tokens # ---------------------------------------------------------------------- # evalexpr() # # Evaluate an expression token sequence for the purposes of evaluating # integral expressions. 
# ---------------------------------------------------------------------- def evalexpr(self,tokens): # tokens = tokenize(line) # Search for defined macros i = 0 while i < len(tokens): if tokens[i].type == self.t_ID and tokens[i].value == 'defined': j = i + 1 needparen = False result = "0L" while j < len(tokens): if tokens[j].type in self.t_WS: j += 1 continue elif tokens[j].type == self.t_ID: if tokens[j].value in self.macros: result = "1L" else: result = "0L" if not needparen: break elif tokens[j].value == '(': needparen = True elif tokens[j].value == ')': break else: self.error(self.source,tokens[i].lineno,"Malformed defined()") j += 1 tokens[i].type = self.t_INTEGER tokens[i].value = self.t_INTEGER_TYPE(result) del tokens[i+1:j+1] i += 1 tokens = self.expand_macros(tokens) for i,t in enumerate(tokens): if t.type == self.t_ID: tokens[i] = copy.copy(t) tokens[i].type = self.t_INTEGER tokens[i].value = self.t_INTEGER_TYPE("0L") elif t.type == self.t_INTEGER: tokens[i] = copy.copy(t) # Strip off any trailing suffixes tokens[i].value = str(tokens[i].value) while tokens[i].value[-1] not in "0123456789abcdefABCDEF": tokens[i].value = tokens[i].value[:-1] expr = "".join([str(x.value) for x in tokens]) expr = expr.replace("&&"," and ") expr = expr.replace("||"," or ") expr = expr.replace("!"," not ") try: result = eval(expr) except StandardError: self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") result = 0 return result # ---------------------------------------------------------------------- # parsegen() # # Parse an input string/ # ---------------------------------------------------------------------- def parsegen(self,input,source=None): # Replace trigraph sequences t = trigraph(input) lines = self.group_lines(t) if not source: source = "" self.define("__FILE__ \"%s\"" % source) self.source = source chunk = [] enable = True iftrigger = False ifstack = [] for x in lines: for i,tok in enumerate(x): if tok.type not in self.t_WS: break if tok.value == '#': # Preprocessor directive for tok in x: if tok in self.t_WS and '\n' in tok.value: chunk.append(tok) dirtokens = self.tokenstrip(x[i+1:]) if dirtokens: name = dirtokens[0].value args = self.tokenstrip(dirtokens[1:]) else: name = "" args = [] if name == 'define': if enable: for tok in self.expand_macros(chunk): yield tok chunk = [] self.define(args) elif name == 'include': if enable: for tok in self.expand_macros(chunk): yield tok chunk = [] oldfile = self.macros['__FILE__'] for tok in self.include(args): yield tok self.macros['__FILE__'] = oldfile self.source = source elif name == 'undef': if enable: for tok in self.expand_macros(chunk): yield tok chunk = [] self.undef(args) elif name == 'ifdef': ifstack.append((enable,iftrigger)) if enable: if not args[0].value in self.macros: enable = False iftrigger = False else: iftrigger = True elif name == 'ifndef': ifstack.append((enable,iftrigger)) if enable: if args[0].value in self.macros: enable = False iftrigger = False else: iftrigger = True elif name == 'if': ifstack.append((enable,iftrigger)) if enable: result = self.evalexpr(args) if not result: enable = False iftrigger = False else: iftrigger = True elif name == 'elif': if ifstack: if ifstack[-1][0]: # We only pay attention if outer "if" allows this if enable: # If already true, we flip enable False enable = False elif not iftrigger: # If False, but not triggered yet, we'll check expression result = self.evalexpr(args) if result: enable = True iftrigger = True else: self.error(self.source,dirtokens[0].lineno,"Misplaced 
#elif") elif name == 'else': if ifstack: if ifstack[-1][0]: if enable: enable = False elif not iftrigger: enable = True iftrigger = True else: self.error(self.source,dirtokens[0].lineno,"Misplaced #else") elif name == 'endif': if ifstack: enable,iftrigger = ifstack.pop() else: self.error(self.source,dirtokens[0].lineno,"Misplaced #endif") else: # Unknown preprocessor directive pass else: # Normal text if enable: chunk.extend(x) for tok in self.expand_macros(chunk): yield tok chunk = [] # ---------------------------------------------------------------------- # include() # # Implementation of file-inclusion # ---------------------------------------------------------------------- def include(self,tokens): # Try to extract the filename and then process an include file if not tokens: return if tokens: if tokens[0].value != '<' and tokens[0].type != self.t_STRING: tokens = self.expand_macros(tokens) if tokens[0].value == '<': # Include <...> i = 1 while i < len(tokens): if tokens[i].value == '>': break i += 1 else: print("Malformed #include <...>") return filename = "".join([x.value for x in tokens[1:i]]) path = self.path + [""] + self.temp_path elif tokens[0].type == self.t_STRING: filename = tokens[0].value[1:-1] path = self.temp_path + [""] + self.path else: print("Malformed #include statement") return for p in path: iname = os.path.join(p,filename) try: data = open(iname,"r").read() dname = os.path.dirname(iname) if dname: self.temp_path.insert(0,dname) for tok in self.parsegen(data,filename): yield tok if dname: del self.temp_path[0] break except IOError: pass else: print("Couldn't find '%s'" % filename) # ---------------------------------------------------------------------- # define() # # Define a new macro # ---------------------------------------------------------------------- def define(self,tokens): if isinstance(tokens,(str,unicode)): tokens = self.tokenize(tokens) linetok = tokens try: name = linetok[0] if len(linetok) > 1: mtype = linetok[1] else: mtype = None if not mtype: m = Macro(name.value,[]) self.macros[name.value] = m elif mtype.type in self.t_WS: # A normal macro m = Macro(name.value,self.tokenstrip(linetok[2:])) self.macros[name.value] = m elif mtype.value == '(': # A macro with arguments tokcount, args, positions = self.collect_args(linetok[1:]) variadic = False for a in args: if variadic: print("No more arguments may follow a variadic argument") break astr = "".join([str(_i.value) for _i in a]) if astr == "...": variadic = True a[0].type = self.t_ID a[0].value = '__VA_ARGS__' variadic = True del a[1:] continue elif astr[-3:] == "..." and a[0].type == self.t_ID: variadic = True del a[1:] # If, for some reason, "." 
is part of the identifier, strip off the name for the purposes # of macro expansion if a[0].value[-3:] == '...': a[0].value = a[0].value[:-3] continue if len(a) > 1 or a[0].type != self.t_ID: print("Invalid macro argument") break else: mvalue = self.tokenstrip(linetok[1+tokcount:]) i = 0 while i < len(mvalue): if i+1 < len(mvalue): if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##': del mvalue[i] continue elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS: del mvalue[i+1] i += 1 m = Macro(name.value,mvalue,[x[0].value for x in args],variadic) self.macro_prescan(m) self.macros[name.value] = m else: print("Bad macro definition") except LookupError: print("Bad macro definition") # ---------------------------------------------------------------------- # undef() # # Undefine a macro # ---------------------------------------------------------------------- def undef(self,tokens): id = tokens[0].value try: del self.macros[id] except LookupError: pass # ---------------------------------------------------------------------- # parse() # # Parse input text. # ---------------------------------------------------------------------- def parse(self,input,source=None,ignore={}): self.ignore = ignore self.parser = self.parsegen(input,source) # ---------------------------------------------------------------------- # token() # # Method to return individual tokens # ---------------------------------------------------------------------- def token(self): try: while True: tok = next(self.parser) if tok.type not in self.ignore: return tok except StopIteration: self.parser = None return None if __name__ == '__main__': import ply.lex as lex lexer = lex.lex() # Run a preprocessor import sys f = open(sys.argv[1]) input = f.read() p = Preprocessor(lexer) p.parse(input,sys.argv[1]) while True: tok = p.token() if not tok: break print(p.source, tok)
apache-2.0
2,012,393,019,758,861,300
35.792873
141
0.415406
false
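The PLY cpp.py record above documents the ANSI-C trigraph table (??= becomes #, ??( becomes [, ??) becomes ], and so on) and exposes a module-level trigraph() helper. A short usage sketch, assuming the module is importable as ply.cpp; the input string is illustrative.

# Assumes ply 3.4's cpp module is on the import path.
from ply.cpp import trigraph

src = "??=define ARR(x) x??(0??)"
print(trigraph(src))  # prints: #define ARR(x) x[0]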
Chilledheart/chromium
tools/telemetry/third_party/typ/typ/arg_parser.py
33
13928
# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import optparse from typ.host import Host class _Bailout(Exception): pass DEFAULT_COVERAGE_OMIT = ['*/typ/*', '*/site-packages/*'] DEFAULT_STATUS_FORMAT = '[%f/%t] ' DEFAULT_SUFFIXES = ['*_test.py', '*_unittest.py'] class ArgumentParser(argparse.ArgumentParser): @staticmethod def add_option_group(parser, title, discovery=False, running=False, reporting=False, skip=None): # TODO: Get rid of this when telemetry upgrades to argparse. ap = ArgumentParser(add_help=False, version=False, discovery=discovery, running=running, reporting=reporting) optlist = ap.optparse_options(skip=skip) group = optparse.OptionGroup(parser, title) group.add_options(optlist) parser.add_option_group(group) def __init__(self, host=None, add_help=True, version=True, discovery=True, reporting=True, running=True): super(ArgumentParser, self).__init__(prog='typ', add_help=add_help) self._host = host or Host() self.exit_status = None self.usage = '%(prog)s [options] [tests...]' if version: self.add_argument('-V', '--version', action='store_true', help='Print the typ version and exit.') if discovery: self.add_argument('-f', '--file-list', metavar='FILENAME', action='store', help=('Takes the list of tests from the file ' '(use "-" for stdin).')) self.add_argument('--all', action='store_true', help=('Run all the tests, including the ones ' 'normally skipped.')) self.add_argument('--isolate', metavar='glob', default=[], action='append', help=('Globs of tests to run in isolation ' '(serially).')) self.add_argument('--skip', metavar='glob', default=[], action='append', help=('Globs of test names to skip (' 'defaults to %(default)s).')) self.add_argument('--suffixes', metavar='glob', default=[], action='append', help=('Globs of test filenames to look for (' 'can specify multiple times; defaults ' 'to %s).' % DEFAULT_SUFFIXES)) if reporting: self.add_argument('--builder-name', help=('Builder name to include in the ' 'uploaded data.')) self.add_argument('-c', '--coverage', action='store_true', help='Reports coverage information.') self.add_argument('--coverage-source', action='append', default=[], help=('Directories to include when running and ' 'reporting coverage (defaults to ' '--top-level-dir plus --path)')) self.add_argument('--coverage-omit', action='append', default=[], help=('Globs to omit when reporting coverage ' '(defaults to %s).' 
% DEFAULT_COVERAGE_OMIT)) self.add_argument('--coverage-annotate', action='store_true', help=('Produce an annotate source report.')) self.add_argument('--coverage-show-missing', action='store_true', help=('Show missing line ranges in coverage ' 'report.')) self.add_argument('--master-name', help=('Buildbot master name to include in the ' 'uploaded data.')) self.add_argument('--metadata', action='append', default=[], help=('Optional key=value metadata that will ' 'be included in the results.')) self.add_argument('--test-results-server', help=('If specified, uploads the full results ' 'to this server.')) self.add_argument('--test-type', help=('Name of test type to include in the ' 'uploaded data (e.g., ' '"telemetry_unittests").')) self.add_argument('--write-full-results-to', metavar='FILENAME', action='store', help=('If specified, writes the full results to ' 'that path.')) self.add_argument('--write-trace-to', metavar='FILENAME', action='store', help=('If specified, writes the trace to ' 'that path.')) self.add_argument('tests', nargs='*', default=[], help=argparse.SUPPRESS) if running: self.add_argument('-d', '--debugger', action='store_true', help='Runs the tests under the debugger.') self.add_argument('-j', '--jobs', metavar='N', type=int, default=self._host.cpu_count(), help=('Runs N jobs in parallel ' '(defaults to %(default)s).')) self.add_argument('-l', '--list-only', action='store_true', help='Lists all the test names found and exits.') self.add_argument('-n', '--dry-run', action='store_true', help=argparse.SUPPRESS) self.add_argument('-q', '--quiet', action='store_true', default=False, help=('Runs as quietly as possible ' '(only prints errors).')) self.add_argument('-s', '--status-format', default=self._host.getenv('NINJA_STATUS', DEFAULT_STATUS_FORMAT), help=argparse.SUPPRESS) self.add_argument('-t', '--timing', action='store_true', help='Prints timing info.') self.add_argument('-v', '--verbose', action='count', default=0, help=('Prints more stuff (can specify multiple ' 'times for more output).')) self.add_argument('--passthrough', action='store_true', default=False, help='Prints all output while running.') self.add_argument('--retry-limit', type=int, default=0, help='Retries each failure up to N times.') self.add_argument('--terminal-width', type=int, default=self._host.terminal_width(), help=argparse.SUPPRESS) self.add_argument('--overwrite', action='store_true', default=None, help=argparse.SUPPRESS) self.add_argument('--no-overwrite', action='store_false', dest='overwrite', default=None, help=argparse.SUPPRESS) if discovery or running: self.add_argument('-P', '--path', action='append', default=[], help=('Adds dir to sys.path (can specify ' 'multiple times).')) self.add_argument('--top-level-dir', default=None, help=('Sets the top directory of project ' '(used when running subdirs).')) def parse_args(self, args=None, namespace=None): try: rargs = super(ArgumentParser, self).parse_args(args=args, namespace=namespace) except _Bailout: return None for val in rargs.metadata: if '=' not in val: self._print_message('Error: malformed --metadata "%s"' % val) self.exit_status = 2 if rargs.test_results_server: if not rargs.builder_name: self._print_message('Error: --builder-name must be specified ' 'along with --test-result-server') self.exit_status = 2 if not rargs.master_name: self._print_message('Error: --master-name must be specified ' 'along with --test-result-server') self.exit_status = 2 if not rargs.test_type: self._print_message('Error: --test-type must be specified ' 'along 
with --test-result-server') self.exit_status = 2 if not rargs.suffixes: rargs.suffixes = DEFAULT_SUFFIXES if not rargs.coverage_omit: rargs.coverage_omit = DEFAULT_COVERAGE_OMIT if rargs.debugger: # pragma: no cover rargs.jobs = 1 rargs.passthrough = True if rargs.overwrite is None: rargs.overwrite = self._host.stdout.isatty() and not rargs.verbose return rargs # Redefining built-in 'file' pylint: disable=W0622 def _print_message(self, msg, file=None): self._host.print_(msg=msg, stream=file, end='\n') def print_help(self, file=None): self._print_message(msg=self.format_help(), file=file) def error(self, message, bailout=True): # pylint: disable=W0221 self.exit(2, '%s: error: %s\n' % (self.prog, message), bailout=bailout) def exit(self, status=0, message=None, # pylint: disable=W0221 bailout=True): self.exit_status = status if message: self._print_message(message, file=self._host.stderr) if bailout: raise _Bailout() def optparse_options(self, skip=None): skip = skip or [] options = [] for action in self._actions: args = [flag for flag in action.option_strings if flag not in skip] if not args or action.help == '==SUPPRESS==': # must either be a positional argument like 'tests' # or an option we want to skip altogether. continue kwargs = { 'default': action.default, 'dest': action.dest, 'help': action.help, 'metavar': action.metavar, 'type': action.type, 'action': _action_str(action) } options.append(optparse.make_option(*args, **kwargs)) return options def argv_from_args(self, args): default_parser = ArgumentParser(host=self._host) default_args = default_parser.parse_args([]) argv = [] tests = [] d = vars(args) for k in sorted(d.keys()): v = d[k] argname = _argname_from_key(k) action = self._action_for_key(k) action_str = _action_str(action) if k == 'tests': tests = v continue if getattr(default_args, k) == v: # this arg has the default value, so skip it. continue assert action_str in ['append', 'count', 'store', 'store_true'] if action_str == 'append': for el in v: argv.append(argname) argv.append(el) elif action_str == 'count': for _ in range(v): argv.append(argname) elif action_str == 'store': argv.append(argname) argv.append(str(v)) else: # action_str == 'store_true' argv.append(argname) return argv + tests def _action_for_key(self, key): for action in self._actions: if action.dest == key: return action assert False, ('Could not find an action for %s' # pragma: no cover % key) def _action_str(action): # Access to a protected member pylint: disable=W0212 assert action.__class__ in ( argparse._AppendAction, argparse._CountAction, argparse._StoreAction, argparse._StoreTrueAction ) if isinstance(action, argparse._AppendAction): return 'append' if isinstance(action, argparse._CountAction): return 'count' if isinstance(action, argparse._StoreAction): return 'store' if isinstance(action, argparse._StoreTrueAction): return 'store_true' def _argname_from_key(key): return '--' + key.replace('_', '-')
bsd-3-clause
3,381,652,566,841,887,000
43.215873
79
0.48923
false
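The typ arg_parser record above wraps argparse with discovery, running, and reporting option groups. A small usage sketch, assuming the typ package is importable; the flag values and the test name are illustrative.

from typ.arg_parser import ArgumentParser

parser = ArgumentParser()
args = parser.parse_args(['--jobs', '4', '--retry-limit', '2', 'some_test.SomeTest'])
print(args.jobs, args.retry_limit, args.tests)  # 4 2 ['some_test.SomeTest']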
bazz-erp/erpnext
erpnext/setup/doctype/company/company.py
1
15659
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe, os from frappe import _ from frappe.utils import cint, today, formatdate import frappe.defaults from frappe.model.document import Document from frappe.contacts.address_and_contact import load_address_and_contact class Company(Document): def onload(self): load_address_and_contact(self, "company") self.get("__onload")["transactions_exist"] = self.check_if_transactions_exist() def check_if_transactions_exist(self): exists = False for doctype in ["Sales Invoice", "Delivery Note", "Sales Order", "Quotation", "Purchase Invoice", "Purchase Receipt", "Purchase Order", "Supplier Quotation"]: if frappe.db.sql("""select name from `tab%s` where company=%s and docstatus=1 limit 1""" % (doctype, "%s"), self.name): exists = True break return exists def validate(self): self.validate_abbr() self.validate_default_accounts() self.validate_currency() self.validate_coa_input() self.validate_perpetual_inventory() def validate_abbr(self): if not self.abbr: self.abbr = ''.join([c[0] for c in self.company_name.split()]).upper() self.abbr = self.abbr.strip() if self.get('__islocal') and len(self.abbr) > 5: frappe.throw(_("Abbreviation cannot have more than 5 characters")) if not self.abbr.strip(): frappe.throw(_("Abbreviation is mandatory")) if frappe.db.sql("select abbr from tabCompany where name!=%s and abbr=%s", (self.name, self.abbr)): frappe.throw(_("Abbreviation already used for another company")) def validate_default_accounts(self): for field in ["default_bank_account", "default_cash_account", "default_receivable_account", "default_payable_account", "default_expense_account", "default_income_account", "stock_received_but_not_billed", "stock_adjustment_account", "expenses_included_in_valuation", "default_payroll_payable_account", "default_deferred_checks_account"]: if self.get(field): for_company = frappe.db.get_value("Account", self.get(field), "company") if for_company != self.name: frappe.throw(_("Account {0} does not belong to company: {1}") .format(self.get(field), self.name)) def validate_currency(self): self.previous_default_currency = frappe.db.get_value("Company", self.name, "default_currency") if self.default_currency and self.previous_default_currency and \ self.default_currency != self.previous_default_currency and \ self.check_if_transactions_exist(): frappe.throw(_("Cannot change company's default currency, because there are existing transactions. 
Transactions must be cancelled to change the default currency.")) def on_update(self): if not frappe.db.sql("""select name from tabAccount where company=%s and docstatus<2 limit 1""", self.name): if not frappe.local.flags.ignore_chart_of_accounts: self.create_default_accounts() self.create_default_warehouses() self.install_country_fixtures() if not frappe.db.get_value("Cost Center", {"is_group": 0, "company": self.name}): self.create_default_cost_center() if not frappe.local.flags.ignore_chart_of_accounts: self.set_default_accounts() if self.default_cash_account: self.set_mode_of_payment_account() if self.default_currency: frappe.db.set_value("Currency", self.default_currency, "enabled", 1) if hasattr(frappe.local, 'enable_perpetual_inventory') and \ self.name in frappe.local.enable_perpetual_inventory: frappe.local.enable_perpetual_inventory[self.name] = self.enable_perpetual_inventory frappe.clear_cache() def install_country_fixtures(self): path = frappe.get_app_path('erpnext', 'regional', frappe.scrub(self.country)) if os.path.exists(path.encode("utf-8")): frappe.get_attr("erpnext.regional.{0}.setup.setup" .format(self.country.lower()))(self) def create_default_warehouses(self): for wh_detail in [ {"warehouse_name": _("All Warehouses"), "is_group": 1}, {"warehouse_name": _("Stores"), "is_group": 0}, {"warehouse_name": _("Work In Progress"), "is_group": 0}, {"warehouse_name": _("Finished Goods"), "is_group": 0}]: if not frappe.db.exists("Warehouse", "{0} - {1}".format(wh_detail["warehouse_name"], self.abbr)): stock_group = frappe.db.get_value("Account", {"account_type": "Stock", "is_group": 1, "company": self.name}) if stock_group: warehouse = frappe.get_doc({ "doctype":"Warehouse", "warehouse_name": wh_detail["warehouse_name"], "is_group": wh_detail["is_group"], "company": self.name, "parent_warehouse": "{0} - {1}".format(_("All Warehouses"), self.abbr) \ if not wh_detail["is_group"] else "" }) warehouse.flags.ignore_permissions = True warehouse.insert() def create_default_accounts(self): from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts create_charts(self.name, self.chart_of_accounts, self.existing_company) frappe.db.set(self, "default_receivable_account", frappe.db.get_value("Account", {"company": self.name, "account_type": "Receivable", "is_group": 0})) frappe.db.set(self, "default_payable_account", frappe.db.get_value("Account", {"company": self.name, "account_type": "Payable", "is_group": 0})) def validate_coa_input(self): if self.create_chart_of_accounts_based_on == "Existing Company": self.chart_of_accounts = None if not self.existing_company: frappe.throw(_("Please select Existing Company for creating Chart of Accounts")) else: self.existing_company = None self.create_chart_of_accounts_based_on = "Standard Template" if not self.chart_of_accounts: self.chart_of_accounts = "Standard" def validate_perpetual_inventory(self): if not self.get("__islocal"): if cint(self.enable_perpetual_inventory) == 1 and not self.default_inventory_account: frappe.msgprint(_("Set default inventory account for perpetual inventory"), alert=True, indicator='orange') def set_default_accounts(self): self._set_default_account("default_cash_account", "Cash") self._set_default_account("default_bank_account", "Bank") self._set_default_account("round_off_account", "Round Off") self._set_default_account("accumulated_depreciation_account", "Accumulated Depreciation") self._set_default_account("depreciation_expense_account", "Depreciation") if 
self.enable_perpetual_inventory: self._set_default_account("stock_received_but_not_billed", "Stock Received But Not Billed") self._set_default_account("default_inventory_account", "Stock") self._set_default_account("stock_adjustment_account", "Stock Adjustment") self._set_default_account("expenses_included_in_valuation", "Expenses Included In Valuation") self._set_default_account("default_expense_account", "Cost of Goods Sold") if not self.default_income_account: self.db_set("default_income_account", frappe.db.get_value("Account", {"account_name": _("Sales"), "company": self.name})) if not self.default_payable_account: self.db_set("default_payable_account", self.default_payable_account) def _set_default_account(self, fieldname, account_type): if self.get(fieldname): return account = frappe.db.get_value("Account", {"account_type": account_type, "is_group": 0, "company": self.name}) if account: self.db_set(fieldname, account) def set_mode_of_payment_account(self): cash = frappe.db.get_value('Mode of Payment', {'type': 'Cash'}, 'name') if cash and self.default_cash_account \ and not frappe.db.get_value('Mode of Payment Account', {'company': self.name}): mode_of_payment = frappe.get_doc('Mode of Payment', cash) mode_of_payment.append('accounts', { 'company': self.name, 'default_account': self.default_cash_account }) mode_of_payment.save(ignore_permissions=True) def create_default_cost_center(self): cc_list = [ { 'cost_center_name': self.name, 'company':self.name, 'is_group': 1, 'parent_cost_center':None }, { 'cost_center_name':_('Main'), 'company':self.name, 'is_group':0, 'parent_cost_center':self.name + ' - ' + self.abbr }, ] for cc in cc_list: cc.update({"doctype": "Cost Center"}) cc_doc = frappe.get_doc(cc) cc_doc.flags.ignore_permissions = True if cc.get("cost_center_name") == self.name: cc_doc.flags.ignore_mandatory = True cc_doc.insert() frappe.db.set(self, "cost_center", _("Main") + " - " + self.abbr) frappe.db.set(self, "round_off_cost_center", _("Main") + " - " + self.abbr) frappe.db.set(self, "depreciation_cost_center", _("Main") + " - " + self.abbr) def before_rename(self, olddn, newdn, merge=False): if merge: frappe.throw(_("Sorry, companies cannot be merged")) def after_rename(self, olddn, newdn, merge=False): frappe.db.set(self, "company_name", newdn) frappe.db.sql("""update `tabDefaultValue` set defvalue=%s where defkey='Company' and defvalue=%s""", (newdn, olddn)) frappe.defaults.clear_cache() def abbreviate(self): self.abbr = ''.join([c[0].upper() for c in self.company_name.split()]) def on_trash(self): """ Trash accounts and cost centers for this company if no gl entry exists """ accounts = frappe.db.sql_list("select name from tabAccount where company=%s", self.name) cost_centers = frappe.db.sql_list("select name from `tabCost Center` where company=%s", self.name) warehouses = frappe.db.sql_list("select name from tabWarehouse where company=%s", self.name) rec = frappe.db.sql("SELECT name from `tabGL Entry` where company = %s", self.name) if not rec: frappe.db.sql("""delete from `tabBudget Account` where exists(select name from tabBudget where name=`tabBudget Account`.parent and company = %s)""", self.name) for doctype in ["Account", "Cost Center", "Budget", "Party Account"]: frappe.db.sql("delete from `tab{0}` where company = %s".format(doctype), self.name) if not frappe.db.get_value("Stock Ledger Entry", {"company": self.name}): frappe.db.sql("""delete from `tabWarehouse` where company=%s""", self.name) frappe.defaults.clear_default("company", value=self.name) # clear 
default accounts, warehouses from item if warehouses: for f in ["default_warehouse", "website_warehouse"]: frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)""" % (f, f, ', '.join(['%s']*len(warehouses))), tuple(warehouses)) frappe.db.sql("""delete from `tabItem Reorder` where warehouse in (%s)""" % ', '.join(['%s']*len(warehouses)), tuple(warehouses)) if accounts: for f in ["income_account", "expense_account"]: frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)""" % (f, f, ', '.join(['%s']*len(accounts))), tuple(accounts)) if cost_centers: for f in ["selling_cost_center", "buying_cost_center"]: frappe.db.sql("""update tabItem set %s=NULL where %s in (%s)""" % (f, f, ', '.join(['%s']*len(cost_centers))), tuple(cost_centers)) # reset default company frappe.db.sql("""update `tabSingles` set value="" where doctype='Global Defaults' and field='default_company' and value=%s""", self.name) @frappe.whitelist() def replace_abbr(company, old, new): new = new.strip() if not new: frappe.throw(_("Abbr can not be blank or space")) frappe.only_for("System Manager") frappe.db.set_value("Company", company, "abbr", new) def _rename_record(dt): for d in frappe.db.sql("select name from `tab%s` where company=%s" % (dt, '%s'), company): parts = d[0].rsplit(" - ", 1) if len(parts) == 1 or parts[1].lower() == old.lower(): frappe.rename_doc(dt, d[0], parts[0] + " - " + new) for dt in ["Warehouse", "Account", "Cost Center"]: _rename_record(dt) frappe.db.commit() def get_name_with_abbr(name, company): company_abbr = frappe.db.get_value("Company", company, "abbr") parts = name.split(" - ") if parts[-1].lower() != company_abbr.lower(): parts.append(company_abbr) return " - ".join(parts) def update_company_current_month_sales(company): current_month_year = formatdate(today(), "MM-yyyy") results = frappe.db.sql(''' select sum(base_grand_total) as total, date_format(posting_date, '%m-%Y') as month_year from `tabSales Invoice` where date_format(posting_date, '%m-%Y')="{0}" and docstatus = 1 and company = "{1}" group by month_year '''.format(current_month_year, frappe.db.escape(company)), as_dict = True) monthly_total = results[0]['total'] if len(results) > 0 else 0 frappe.db.set_value("Company", company, "total_monthly_sales", monthly_total) frappe.db.commit() def update_company_monthly_sales(company): '''Cache past year monthly sales of every company based on sales invoices''' from frappe.utils.goal import get_monthly_results import json filter_str = "company = '{0}' and status != 'Draft' and docstatus=1".format(frappe.db.escape(company)) month_to_value_dict = get_monthly_results("Sales Invoice", "base_grand_total", "posting_date", filter_str, "sum") frappe.db.set_value("Company", company, "sales_monthly_history", json.dumps(month_to_value_dict)) frappe.db.commit() def cache_companies_monthly_sales_history(): companies = [d['name'] for d in frappe.get_list("Company")] for company in companies: update_company_monthly_sales(company) frappe.db.commit() @frappe.whitelist() def get_company_details(company_name): return frappe.get_value("Company", company_name, "type")
gpl-3.0
-8,096,426,726,978,281,000
42.985955
180
0.592694
false
jedie/pypyjs-standalone
website/js/pypy.js-0.3.0/lib/modules/test/test_macpath.py
100
2000
import macpath
from test import test_support, test_genericpath
import unittest


class MacPathTestCase(unittest.TestCase):

    def test_abspath(self):
        self.assertEqual(macpath.abspath("xx:yy"), "xx:yy")

    def test_isabs(self):
        isabs = macpath.isabs
        self.assertTrue(isabs("xx:yy"))
        self.assertTrue(isabs("xx:yy:"))
        self.assertTrue(isabs("xx:"))
        self.assertFalse(isabs("foo"))
        self.assertFalse(isabs(":foo"))
        self.assertFalse(isabs(":foo:bar"))
        self.assertFalse(isabs(":foo:bar:"))

    def test_split(self):
        split = macpath.split
        self.assertEqual(split("foo:bar"),
                         ('foo:', 'bar'))
        self.assertEqual(split("conky:mountpoint:foo:bar"),
                         ('conky:mountpoint:foo', 'bar'))
        self.assertEqual(split(":"), ('', ''))
        self.assertEqual(split(":conky:mountpoint:"),
                         (':conky:mountpoint', ''))

    def test_splitext(self):
        splitext = macpath.splitext
        self.assertEqual(splitext(":foo.ext"), (':foo', '.ext'))
        self.assertEqual(splitext("foo:foo.ext"), ('foo:foo', '.ext'))
        self.assertEqual(splitext(".ext"), ('.ext', ''))
        self.assertEqual(splitext("foo.ext:foo"), ('foo.ext:foo', ''))
        self.assertEqual(splitext(":foo.ext:"), (':foo.ext:', ''))
        self.assertEqual(splitext(""), ('', ''))
        self.assertEqual(splitext("foo.bar.ext"), ('foo.bar', '.ext'))

    def test_normpath(self):
        # Issue 5827: Make sure normpath preserves unicode
        for path in (u'', u'.', u'/', u'\\', u':', u'///foo/.//bar//'):
            self.assertIsInstance(macpath.normpath(path), unicode,
                                  'normpath() returned str instead of unicode')


class MacCommonTest(test_genericpath.CommonTest):
    pathmodule = macpath


def test_main():
    test_support.run_unittest(MacPathTestCase, MacCommonTest)


if __name__ == "__main__":
    test_main()
mit
-306,350,961,789,963,800
34.087719
79
0.5705
false
Kaftanov/Cchat
chat-client/client.py
1
5334
#!/usr/bin/env python3 """ ############################# Server applycation version python: python3 based on socket ############################# """ import select import socket import sys import datetime from PyQt5 import QtWidgets from PyQt5.QtCore import QThread, QPoint import userform from cchatui import Ui_CchatWindow from communication import send, receive class RegisterError(Exception): """ My exception for user's password """ def __init__(self, type_exception): Exception.__init__(self) if type_exception == 0: self.msg = "Cchat_Client: You password isn't correct, sry" elif type_exception == 1: self.msg = "Unexpected exception" def __str__(self): return self.msg class WorkThread(QThread): """ Class for working with pyqt thread this class run 'run_chat_loop()' in class 'Client' """ def __init__(self): QThread.__init__(self) def setWorker(self, Cl): self.Cl = Cl def run(self): self.Cl.run_chat_loop() class Client: """ Client is contain prompt -- string -- it's need for visual effect command line functions Server contain __init__ init socket, connect, get name form server cmdloop loop for wait witting message(send/receive) """ def __init__(self, server_host=None, server_port=None): """ init client object """ if server_host is None: self.server_host = 'localhost' else: self.server_host = server_host if server_port is None: self.server_port = 3490 else: self.server_port = server_port # Initial prompt self.user_name = self.connect() self.head = '%s~' % self.user_name self.initUI() def initUI(self): """ Initialize pyqt form""" application = QtWidgets.QApplication(sys.argv) CchatWindow = QtWidgets.QMainWindow() self.ui = Ui_CchatWindow() self.ui.setupUi(CchatWindow) self.ui.sendButton.clicked.connect(self.send_message) self.ui.inputLine.returnPressed.connect(self.send_message) CchatWindow.show() # set thread self.workThread = WorkThread() self.workThread.setWorker(self) self.workThread.start() sys.exit(application.exec_()) def print_into_box(self, data): """ Printing data into text box""" self.ui.textBox.append(data) pass def send_message(self): """ Send message into socket""" # Warning error send message if unbound magic data = self.ui.inputLine.text() time = str(datetime.datetime.now().time())[:16] self.print_into_box(self.head + time + ':' + data) self.ui.inputLine.clear() send(self.sock, data) def connect(self): """ Checking registration/login data""" is_authenticate = False while not is_authenticate: try: form = userform.create_userform() if form is None: sys.exit('KeyboardInterrupt from user_form') data = {} if form[0] == 0: data = form[1] data['type'] = 'log' elif form[0] == 1: data = form[1] data['type'] = 'reg' self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect((self.server_host, self.server_port)) send(self.sock, data) receive_data = receive(self.sock) if receive_data == 'Error': raise RegisterError(0) elif receive_data == 'Success': is_authenticate = True return data['login'] else: raise RegisterError(1) except socket.error as error: print('Cchat_Client: Could not connect to chat server') print(error) sys.exit(1) except RegisterError as msg: print(msg) print("Try again") self.sock.close() except KeyboardInterrupt as signal: print(signal) if self.sock: self.sock.close() sys.exit(1) def run_chat_loop(self): is_shutdown = True while is_shutdown: in_fds, out_fds, err_fds = select.select([self.sock], [], []) for sock in in_fds: if sock is self.sock: data = receive(self.sock) if not data: self.print_into_box('Server was shutdown') is_shutdown = False break 
                    else:
                        if not data['message']:
                            continue
                        message = data['head'] + data['message']
                        print(message)
                        self.print_into_box(message)

    @staticmethod
    def time():
        return str(datetime.datetime.now().time())[:16]


if __name__ == "__main__":
    Client()
gpl-3.0
7,761,749,070,141,927,000
29.306818
77
0.512186
false
mikewiebe-ansible/ansible
test/units/modules/network/iosxr/test_iosxr_user.py
48
4023
# (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from units.compat.mock import patch from ansible.modules.network.iosxr import iosxr_user from units.modules.utils import set_module_args from .iosxr_module import TestIosxrModule, load_fixture class TestIosxrUserModule(TestIosxrModule): module = iosxr_user def setUp(self): super(TestIosxrUserModule, self).setUp() self.mock_get_config = patch('ansible.modules.network.iosxr.iosxr_user.get_config') self.get_config = self.mock_get_config.start() self.mock_load_config = patch('ansible.modules.network.iosxr.iosxr_user.load_config') self.load_config = self.mock_load_config.start() self.mock_is_cliconf = patch('ansible.modules.network.iosxr.iosxr_user.is_cliconf') self.is_cliconf = self.mock_is_cliconf.start() def tearDown(self): super(TestIosxrUserModule, self).tearDown() self.mock_get_config.stop() self.mock_load_config.stop() self.mock_is_cliconf.stop() def load_fixtures(self, commands=None, transport='cli'): self.get_config.return_value = load_fixture('iosxr_user_config.cfg') self.load_config.return_value = dict(diff=None, session='session') self.is_cliconf.return_value = True def test_iosxr_user_delete(self): set_module_args(dict(name='ansible', state='absent')) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['no username ansible']) def test_iosxr_user_password(self): set_module_args(dict(name='ansible', configured_password='test')) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['username ansible secret test']) def test_iosxr_user_purge(self): set_module_args(dict(purge=True)) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['no username ansible']) def test_iosxr_user_group(self): set_module_args(dict(name='ansible', group='sysadmin')) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['username ansible group sysadmin']) def test_iosxr_user_update_password_changed(self): set_module_args(dict(name='test', configured_password='test', update_password='on_create')) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['username test', 'username test secret test']) def test_iosxr_user_update_password_on_create_ok(self): set_module_args(dict(name='ansible', configured_password='test', update_password='on_create')) self.execute_module() def test_iosxr_user_update_password_always(self): set_module_args(dict(name='ansible', configured_password='test', update_password='always')) result = self.execute_module(changed=True) self.assertEqual(result['commands'], ['username ansible secret test']) def test_iosxr_user_admin_mode(self): set_module_args(dict(name='ansible-2', configured_password='test-2', admin=True)) result = self.execute_module(changed=True) 
self.assertEqual(result['commands'], ['username ansible-2', 'username ansible-2 secret test-2'])
gpl-3.0
-5,538,975,829,170,900,000
41.797872
104
0.693512
false
saketkc/statsmodels
statsmodels/sandbox/distributions/tests/_est_fit.py
31
2609
# NOTE: contains only one test, _est_cont_fit, that is renamed so that
# nose doesn't run it
# I put this here for the record and for the case when someone wants to
# verify the quality of fit
# with current parameters: relatively small sample size, default starting values
#   Ran 84 tests in 401.797s
#   FAILED (failures=15)

import numpy.testing as npt
import numpy as np

from scipy import stats

from .distparams import distcont

# this is not a proper statistical test for convergence, but only
# verifies that the estimate and true values don't differ by too much

n_repl1 = 1000  # sample size for first run
n_repl2 = 5000  # sample size for second run, if first run fails
thresh_percent = 0.25  # percent of true parameters for fail cut-off
thresh_min = 0.75  # minimum difference estimate - true to fail test

#distcont = [['genextreme', (3.3184017469423535,)]]

def _est_cont_fit():
    # this tests the closeness of the estimated parameters to the true
    # parameters with fit method of continuous distributions
    # Note: is slow, some distributions don't converge with sample size <= 10000
    for distname, arg in distcont:
        yield check_cont_fit, distname, arg


def check_cont_fit(distname, arg):
    distfn = getattr(stats, distname)
    rvs = distfn.rvs(size=n_repl1, *arg)
    est = distfn.fit(rvs)  # ,*arg) # start with default values
    truearg = np.hstack([arg, [0.0, 1.0]])
    diff = est - truearg

    txt = ''
    diffthreshold = np.max(np.vstack([truearg*thresh_percent,
                                      np.ones(distfn.numargs+2)*thresh_min]), 0)
    # threshold for location
    diffthreshold[-2] = np.max([np.abs(rvs.mean())*thresh_percent, thresh_min])

    if np.any(np.isnan(est)):
        raise AssertionError('nan returned in fit')
    else:
        if np.any((np.abs(diff) - diffthreshold) > 0.0):
##            txt = 'WARNING - diff too large with small sample'
##            print 'parameter diff =', diff - diffthreshold, txt
            rvs = np.concatenate([rvs, distfn.rvs(size=n_repl2-n_repl1, *arg)])
            est = distfn.fit(rvs)  # ,*arg)
            truearg = np.hstack([arg, [0.0, 1.0]])
            diff = est - truearg
            if np.any((np.abs(diff) - diffthreshold) > 0.0):
                txt = 'parameter: %s\n' % str(truearg)
                txt += 'estimated: %s\n' % str(est)
                txt += 'diff     : %s\n' % str(diff)
                raise AssertionError('fit not very good in %s\n' % distfn.name + txt)


if __name__ == "__main__":
    import nose
    #nose.run(argv=['', __file__])
    nose.runmodule(argv=[__file__, '-s'], exit=False)
bsd-3-clause
1,604,428,408,017,526,500
36.271429
85
0.633959
false
nwjs/chromium.src
components/test/data/password_manager/form_classification_tests/signin_forms_test.py
19
24192
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest from form_classification_test import FormClassificationTest """Unittest class for testing signin forms. The test methods were generated by the form annotation extension (components/test/data/password_manager/form_annotation_extension) """ class SignInFormsTest(FormClassificationTest): def test_i_360_cn(self): self.GoTo("http://i.360.cn/login/") self.CheckPwdField("INPUT[name='password'][type='password']", is_pwd_creation=False) def test_9gag_com(self): self.GoTo("http://9gag.com/") self.Click("A.btn-mute") self.CheckPwdField( "INPUT[id='login-email-password'][name='password'][type='password']", is_pwd_creation=False) def test_login_adf_ly(self): self.GoTo("https://login.adf.ly/login") self.CheckPwdField("INPUT#loginPassword[name='password'][type='password']", is_pwd_creation=False) def test_adobeid_na1_services_adobe_com(self): self.GoTo("http://www.adobe.com/") self.Click("SPAN.close > A") self.Click("LI[class*='signin'] > BUTTON") self.CheckPwdField( "INPUT[id='adobeid_password'][name='password'][type='password']" ".text-center", is_pwd_creation=False) def test_publishers_adsterra_net(self): self.GoTo("http://publishers.adsterra.net/login") self.CheckPwdField("INPUT[name='password'][type='password']", is_pwd_creation=False) def test_ssl_allegro_pl(self): self.GoTo("http://allegro.pl/") SignInFormsTest.driver.maximize_window() self.Click( "DIV.header-namespace > DIV.user-links-wrapper > DIV.wrapper-fluid > " "UL.user-nav > LI.login:nth-child(10) > A") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password'].ng-untouched", is_pwd_creation=False) def test_www_amazon_com(self): self.GoTo("http://www.amazon.com/") self.Click("A[id='nav-link-yourAccount']") self.CheckPwdField( "INPUT[id='ap_password'][name='password'][type='password']" ".a-input-text", is_pwd_creation=False) def test_my_screenname_aol_com(self): self.GoTo( "https://my.screenname.aol.com/_cqr/login/login.psp?sitedomain=" "www.aol.com&lang=en&locale=us&authLev=0") self.CheckPwdField( "INPUT#pwdId1[name='password'][type='password'].inputBox", is_pwd_creation=False) def test_secure2_store_apple_com(self): self.GoTo("http://www.apple.com/") self.Click("LI[id='ac-gn-bag'].ac-gn-bag:nth-child(10) > A.ac-gn-link-bag") self.Click("A.ac-gn-bagview-nav-link-signIn") self.CheckPwdField( "INPUT[id='login-password'][name='login-password'][type='password']" ".password", is_pwd_creation=False) def test_www_baidu_com(self): self.GoTo("http://www.baidu.com/") self.Click("DIV#u1 > A[name='tj_login'].lb") self.CheckPwdField( "INPUT#TANGRAM__PSP_8__password[name='password'][type='password']" ".pass-text-input-password", is_pwd_creation=False) def test_blogs_forbes_com(self): self.GoTo("http://blogs.forbes.com/account/login/") self.CheckPwdField( "INPUT[id='login_form_password'][name='pass'][type='password']", is_pwd_creation=False) def test_secure_fly_cnet_com(self): self.GoTo("https://secure-fly.cnet.com/uk/user/login/") self.CheckPwdField( "INPUT#login_password[name='login[password]'][type='password']", is_pwd_creation=False) def test_passport_csdn_net(self): self.GoTo("https://passport.csdn.net/account/login") self.CheckPwdField( "INPUT#password[name='password'][type='password'].pass-word", is_pwd_creation=False) def test_signin_ebay_com(self): self.GoTo("https://signin.ebay.com/ws/eBayISAPI.dll?SignIn") self.CheckPwdField("DIV#pri_signin 
INPUT[type='password']", is_pwd_creation=False) def test_email_163_com(self): self.GoTo("http://email.163.com/") self.CheckPwdField("INPUT#pwdInput[name='password'][type='password']", is_pwd_creation=False) def test_en_softonic_com(self): self.GoTo("http://en.softonic.com/") self.Click("A#header-login-link.header-site-user-btn") # Close overlay. self.Click("A#header-login-link.header-site-user-btn") self.CheckPwdField( "INPUT#password[name='password'][type='password'].field-default", is_pwd_creation=False) def test_id_orange_fr(self): self.GoTo("https://id.orange.fr/auth_user/bin/auth_user.cgi") self.CheckPwdField( "INPUT#default_f_password[name='password'][type='password'].password", is_pwd_creation=False) def test_feedly_com(self): self.GoTo("https://feedly.com/v3/auth/login") self.CheckPwdField("INPUT[name='password'][type='password'].input-bottom", is_pwd_creation=False) def test_gfycat_com(self): self.GoTo("http://gfycat.com/") self.Click("BUTTON.login-btn") self.CheckPwdField("INPUT[type='password']", is_pwd_creation=False) def test_github_com(self): self.GoTo("https://github.com/login") self.CheckPwdField( "INPUT#password[name='password'][type='password'].form-control", is_pwd_creation=False) def test_login_globo_com(self): self.GoTo("https://login.globo.com/login/4728") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password'].password", is_pwd_creation=False) def test_www_gmx_net(self): self.GoTo("http://www.gmx.net/") self.CheckPwdField( "INPUT#inpLoginFreemailPassword[name='password'][type='password']", is_pwd_creation=False) def test_uk_godaddy_com(self): self.GoTo("https://uk.godaddy.com/") self.Click( "DIV[id='sign-in'].pc-menu-item.ux-tray > A.ux-tray-toggle.menu-title") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_id_ifeng_com(self): self.GoTo("https://id.ifeng.com/user/login") self.CheckPwdField( "INPUT#userLogin_pwd[name='userLogin_pwd'][type='password'].txt_270", is_pwd_creation=False) def test_secure_imdb_com(self): self.GoTo("http://www.imdb.com/") self.Click( "LI[id='navUserMenu'].css_nav_menu:nth-child(1) > P.singleLine > " "A[id='nblogin'].cboxElement") self.SwitchTo("IFRAME.cboxIframe") self.Click("A[id='imdb-toggle'].oauth-link") self.CheckPwdField( "INPUT[id='passwordprompt'][name='password'][type='password']", is_pwd_creation=False) def test_imgur_com(self): self.GoTo("https://imgur.com/signin") self.CheckPwdField(" INPUT[name='password'][type='password']", is_pwd_creation=False) def test_secure_indeed_com(self): self.GoTo("https://secure.indeed.com/account/login") self.CheckPwdField( "INPUT#signin_password[name='password'][type='password']" ".input_password", is_pwd_creation=False) def test_www_instagram_com(self): self.GoTo("https://www.instagram.com/") self.Click("A._k6cv7") self.CheckPwdField("INPUT[name='password'][type='password']._kp5f7", is_pwd_creation=False) def test_ssl_kakaku_com(self): self.GoTo("https://ssl.kakaku.com/auth/id/login.asp") self.CheckPwdField( "INPUT#password[name='password'][type='password'].secPwFrm", is_pwd_creation=False) def test_kat_cr(self): self.GoTo("https://kat.cr/") self.Click("DIV.land-login > A.ajaxLink") self.CheckPwdField( "INPUT#field_password[name='password'][type='password'].botmarg5px", is_pwd_creation=False) def test_www_linkedin_com(self): self.GoTo("https://www.linkedin.com/") self.CheckPwdField( "INPUT#login-password[name='session_password'][type='password']", is_pwd_creation=False) def test_login_live_com(self): 
self.GoTo("https://login.live.com/login.srf") self.CheckPwdField("INPUT[name='passwd'][type='password'].form-control", is_pwd_creation=False) def test_mail_ru(self): self.GoTo("https://mail.ru/") self.CheckPwdField( "INPUT#mailbox__password[name='Password'][type='password']" ".mailbox__password", is_pwd_creation=False) def test_mega_nz(self): self.GoTo("https://mega.nz/") self.Click("A.top-login-button") self.CheckPwdField( "INPUT#login-password[name='login-password'][type='password']", is_pwd_creation=False) def test_member_livedoor_com(self): self.GoTo("https://member.livedoor.com/login/") self.CheckPwdField( "INPUT#password[name='password'][type='password'].password", is_pwd_creation=False) def test_my_outbrain_com(self): self.GoTo("https://my.outbrain.com/") self.CheckPwdField( "INPUT#signin-member-password[name='loginPassword'][type='password']" ".input-text", is_pwd_creation=False) def test_www_naver_com(self): self.GoTo("http://www.naver.com/") self.CheckPwdField("INPUT#pw[name='pw'][type='password']", is_pwd_creation=False) def test_ssl_naver_jp(self): self.GoTo("https://ssl.naver.jp/login") self.CheckPwdField( "INPUT#_passwd[name='password'][type='password'].mdInputTxt03Input", is_pwd_creation=False) def test_www_netflix_com(self): self.GoTo("https://www.netflix.com/gb/") self.Click("DIV.nfHeader > A.authLinks") self.CheckPwdField("INPUT[name='password'][type='password'].ui-text-input", is_pwd_creation=False) def test_passport_bilibili_com(self): self.GoTo("https://passport.bilibili.com/login") self.CheckPwdField("INPUT#passwdTxt[name='pwd'][type='password'].password", is_pwd_creation=False) def test_passport_china_com(self): self.GoTo("http://passport.china.com/") self.CheckPwdField("INPUT[id='55'][name='password'][type='password']", is_pwd_creation=False) def test_www_pinterest_com(self): self.GoTo("https://www.pinterest.com/login/") self.CheckPwdField("INPUT[name='password'][type='password']", is_pwd_creation=False) def test_www_pixnet_net(self): self.GoTo("https://www.pixnet.net/") self.CheckPwdField( "INPUT[id='input-password'][name='password'][type='password']", is_pwd_creation=False) def test_qq_com(self): self.GoTo("http://www.qq.com/") self.Click("A[id='loginGrayLayout'].login") self.SwitchTo("#login_frame") self.CheckPwdField("INPUT[id='p'][name='p'][type='password'].password", is_pwd_creation=False) def test_www_rakuten_co_jp(self): self.GoTo("https://www.rakuten.co.jp/myrakuten/login.html") self.CheckPwdField("INPUT[id='passwd'][name='p'][type='password'].textBox", is_pwd_creation=False) def test_www_reddit_com(self): self.GoTo("https://www.reddit.com/") self.Click("SPAN.user > A.login-required") self.CheckPwdField( "INPUT[id='passwd_login'][name='passwd'][type='password']" ".c-form-control", is_pwd_creation=False) def test_login_sina_com_cn(self): self.GoTo("https://login.sina.com.cn/signup/signin.php") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_login_skype_com(self): self.GoTo("https://login.skype.com/login") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_www_sohu_com(self): self.GoTo("http://www.sohu.com/") self.CheckPwdField("INPUT[name='password'][type='password'].simple-pwd", is_pwd_creation=False) def test_soundcloud_com(self): self.GoTo( "https://soundcloud.com/connect?client_id=02gUJC0hH2ct1EGOcYXQIzRFU91c" "72Ea&response_type=token&scope=non-expiring%20fast-connect%20purchase" 
"%20upload&display=next&redirect_uri=https%3A//soundcloud.com/" "soundcloud-callback.html") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password'].sc-input", is_pwd_creation=False) def test_ssl_bbc_com(self): self.GoTo("https://ssl.bbc.com/id/signin") self.CheckPwdField( "INPUT[id='bbcid_password'][name='password'][type='password'].password", is_pwd_creation=False) def test_openid_stackexchange_com(self): self.GoTo("https://stackexchange.com/users/login?#log-in") self.SwitchTo("#affiliate-signin-iframe") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password']" ".framed-text-field", is_pwd_creation=False) def test_stackoverflow_com(self): self.GoTo("https://stackoverflow.com/users/login") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_store_steampowered_com(self): self.GoTo("https://store.steampowered.com//login/") self.CheckPwdField( "INPUT[id='input_password'][name='password'][type='password']" ".text_input", is_pwd_creation=False) def test_profile_theguardian_com(self): self.GoTo("https://profile.theguardian.com/signin") self.CheckPwdField( "INPUT[id='signin_field_password'][name='password'][type='password']" ".signin-form__field--password", is_pwd_creation=False) def test_thepiratebay_se(self): self.GoTo("https://thepiratebay.se/login") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_torrentz_eu(self): self.GoTo("http://torrentz.eu/profile") self.CheckPwdField("INPUT[id='lpass'][name='pass'][type='password'].i", is_pwd_creation=False) def test_login_tudou_com(self): self.GoTo("http://login.tudou.com/") self.CheckPwdField( "INPUT[id='password1'][name='password1'][type='password'].password", is_pwd_creation=False) def test_twitter_com(self): self.GoTo("https://twitter.com/") self.CheckPwdField( "INPUT[id='signin-password'][name='session[password]'][type='password']" ".flex-table-input", is_pwd_creation=False) def test_member_udn_com(self): self.GoTo("https://member.udn.com/member/login.jsp") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password'].textfield", is_pwd_creation=False) def test_en_uptodown_com(self): SignInFormsTest.driver.maximize_window() self.GoTo("http://en.uptodown.com/ubuntu") self.Click("A.button > SPAN") self.CheckPwdField("INPUT[id='password'][type='password']", is_pwd_creation=False) def test_vimeo_com(self): self.GoTo("https://vimeo.com/") self.Click("A.js-login_toggle") self.CheckPwdField( "INPUT[id='login_password'][name='password'][type='password']" ".js-login_password", is_pwd_creation=False) def test_web_de(self): self.GoTo("http://web.de/") self.Click("A.icon-freemail") self.CheckPwdField( "INPUT[id='inpFreemailLoginPassword'][name='password']" "[type='password']", is_pwd_creation=False) def test_weibo_com(self): self.GoTo("http://weibo.com/") self.Click("DIV.tab > A:nth-child(2)") self.CheckPwdField("INPUT[name='password'][type='password'].W_input", is_pwd_creation=False) def test_en_wikipedia_org(self): self.GoTo("https://en.wikipedia.org/w/index.php?title=Special:UserLogin") self.CheckPwdField( "INPUT[id='wpPassword1'][name='wpPassword'][type='password']" ".loginPassword", is_pwd_creation=False) def test_www_avito_ru(self): self.GoTo("https://www.avito.ru/profile/login") self.CheckPwdField("INPUT[name='password'][type='password'].password-field", is_pwd_creation=False) def test_www_babytree_com(self): self.GoTo("http://www.babytree.com/reg/login.php") self.CheckPwdField( 
"INPUT[name='password'][type='password'].login-input-text", is_pwd_creation=False) def test_www_booking_com(self): self.GoTo("http://www.booking.com/") self.Click("li.account_register_option div.sign_in_wrapper") self.CheckPwdField("INPUT[name='password'][type='password']", is_pwd_creation=False) def test_www_buzzfeed_com(self): self.GoTo("http://www.buzzfeed.com/") self.Click( "DIV.page-nav__utilities > DIV[id='nav-signin'].nav-signin > " "DIV[id='usernav-signin'] > A[id='header-signin'].nav-signin-icon") self.CheckPwdField( "INPUT[name='password'][type='password'].js-user-password", is_pwd_creation=False) def test_www_dailymail_co_uk(self): self.GoTo("http://www.dailymail.co.uk/home/index.html") self.Click("A.js-login") self.CheckPwdField( "INPUT[id='reg-lbx-password-lightbox'][name='j_password']" "[type='password']", is_pwd_creation=False) def test_www_deviantart_com(self): self.GoTo("http://www.deviantart.com/") self.Click("TD[id='oh-loginbutton'] > A.oh-touch") self.CheckPwdField( "INPUT[id='login-password'][name='password'][type='password'].itext", is_pwd_creation=False) def test_www_dmm_com(self): self.GoTo("https://www.dmm.com/en/my/-/login/=/path=SgReFg__/") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_www_douyu_com(self): self.GoTo("http://www.douyu.com/") SignInFormsTest.driver.maximize_window() self.Click("A.u-login") self.CheckPwdField("FORM > P > INPUT[name='password'][type='password'].ipt", is_pwd_creation=False) def test_www_dropbox_com(self): self.GoTo("https://www.dropbox.com/") SignInFormsTest.driver.maximize_window() self.Click("A[id='sign-in'].sign-in") self.CheckPwdField("INPUT[name='login_password'][type='password']", is_pwd_creation=False) def test_www_etsy_com(self): self.GoTo("https://www.etsy.com/") self.Click("A[id='sign-in'].signin-header-action") self.CheckPwdField( "INPUT[id='password-existing'][name='password'][type='password'].text", is_pwd_creation=False) def test_www_facebook_com(self): self.GoTo("https://www.facebook.com/") self.CheckPwdField( "INPUT[id='pass'][name='pass'][type='password'].inputtext", is_pwd_creation=False) def test_www_foxnews_com(self): self.GoTo("http://www.foxnews.com/") self.Click("A.login") self.CheckPwdField( "INPUT[id='capture_signIn_traditionalSignIn_password']" "[name='traditionalSignIn_password'][type='password']" ".capture_traditionalSignIn_password", is_pwd_creation=False) def test_www_homedepot_com(self): self.GoTo("http://www.homedepot.com/") self.Click("A[id='headerMyAccount'].headerMyAccount__button") self.Click("A.headerMyAccount__authLink") self.CheckPwdField( "INPUT[id='password'][name='logonPassword'][type='password']" ".width_332px", is_pwd_creation=False) def test_www_livejournal_com(self): self.GoTo("http://www.livejournal.com/") self.Click("A.s-header-item__link--login") self.CheckPwdField( "INPUT[id='lj_loginwidget_password'][name='password'][type='password']" ".b-input", is_pwd_creation=False) def test_www_mediafire_com(self): self.GoTo( "https://www.mediafire.com/templates/login_signup/login_signup.php") self.CheckPwdField( "INPUT[id='widget_login_pass'][name='login_pass'][type='password']", is_pwd_creation=False) def test_www_nytimes_com(self): self.GoTo("http://www.nytimes.com/") self.Click("DIV[id='instl_close'] > A.nytdGrowlNotifyCross" ) # Close overlay. 
self.Click("BUTTON.button.login-modal-trigger") self.CheckPwdField( "INPUT[id='login-password'][name='password'][type='password']" ".login-password", is_pwd_creation=False) def test_www_popads_net(self): self.GoTo("https://www.popads.net/") self.CheckPwdField( "INPUT[id='UserPassword'][name='data[User][password]']" "[type='password']", is_pwd_creation=False) def test_www_quora_com(self): self.GoTo("https://www.quora.com/") self.CheckPwdField( "INPUT[id*='password'][name='password'][type='password']" ".header_login_text_box", is_pwd_creation=False) def test_www_slideshare_net(self): self.GoTo("https://www.slideshare.net/login") self.CheckPwdField( "INPUT[id='user_password'][name='user_password'][type='password']", is_pwd_creation=False) def test_www_so_com(self): self.GoTo("https://www.so.com/") self.Click("A[id='user-login']") self.CheckPwdField( "INPUT[id*='password'][name='password'][type='password']" ".quc-input-password", is_pwd_creation=False) def test_www_sunmaker_com(self): self.GoTo("https://www.sunmaker.com/de/") self.CheckPwdField("INPUT[name='password'][type='password'].form-control", is_pwd_creation=False) def test_www_tianya_cn(self): self.GoTo("http://www.tianya.cn/") self.CheckPwdField( "INPUT[id='password1'][name='vpassword'][type='password'].text-ipt", is_pwd_creation=False) def test_www_tribunnews_com(self): self.GoTo("http://www.tribunnews.com/") SignInFormsTest.driver.maximize_window() self.Click("A[id='login'].blue") self.CheckPwdField( "FORM[id='logform'].form INPUT[name='password']" "[type='password'].input", is_pwd_creation=False) def test_www_tripadvisor_com(self): self.GoTo("https://www.tripadvisor.com/") self.Click("LI.login:nth-child(4) > SPAN.link.no_cpu") self.SwitchTo("#overlayRegFrame") self.CheckPwdField("INPUT[id='regSignIn.password'][type='password'].text", is_pwd_creation=False) def test_www_walmart_com(self): self.GoTo("https://www.walmart.com/account/login") self.CheckPwdField( "INPUT[id='login-password'][name='login-password'][type='password']" ".js-password", is_pwd_creation=False) def test_www_wittyfeed_com(self): self.GoTo("http://www.wittyfeed.com/") self.Click("A.express") self.CheckPwdField( "DIV.signIn_passwordDiv > " "INPUT[name='password'][type='password'].form-control", is_pwd_creation=False) def test_www_yelp_com(self): self.GoTo("https://www.yelp.com/login") self.CheckPwdField( "FORM[id='ajax-login'].yform > " "INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_www_zillow_com(self): self.GoTo("https://www.zillow.com") self.Click("A[id='login_opener'].zss-login-link") self.SwitchTo("DIV#login_content > IFRAME") self.CheckPwdField("INPUT[id='password'][name='password'][type='password']", is_pwd_creation=False) def test_yandex_ru(self): self.GoTo("https://yandex.ru/") self.CheckPwdField("INPUT[name='passwd'][type='password'].input__input", is_pwd_creation=False) def test_login_youku_com(self): self.GoTo("http://login.youku.com/user/login_win") self.CheckPwdField( "INPUT[id='password'][name='password'][type='password'].form_input", is_pwd_creation=False) def test_service_zol_com_cn(self): self.GoTo("http://service.zol.com.cn/user/siteLogin.php") self.CheckPwdField("INPUT[id='loginPwd'][type='password']", is_pwd_creation=False) if __name__ == "__main__": unittest.main()
bsd-3-clause
-1,097,490,221,883,458,600
36.047473
80
0.634218
false
Dexhub/MTX
src/arch/x86/isa/insts/simd64/integer/data_reordering/unpack_and_interleave.py
91
4097
# Copyright (c) 2007 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#
# Authors: Gabe Black

microcode = '''
def macroop PUNPCKLBW_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=0, size=1
};

def macroop PUNPCKLBW_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=1
};

def macroop PUNPCKLBW_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=1
};

def macroop PUNPCKLWD_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=0, size=2
};

def macroop PUNPCKLWD_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=2
};

def macroop PUNPCKLWD_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=2
};

def macroop PUNPCKLDQ_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=0, size=4
};

def macroop PUNPCKLDQ_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=4
};

def macroop PUNPCKLDQ_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=0, size=4
};

def macroop PUNPCKHBW_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=1, size=1
};

def macroop PUNPCKHBW_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=1
};

def macroop PUNPCKHBW_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=1
};

def macroop PUNPCKHWD_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=1, size=2
};

def macroop PUNPCKHWD_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=2
};

def macroop PUNPCKHWD_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=2
};

def macroop PUNPCKHDQ_MMX_MMX {
    unpack mmx, mmx, mmxm, ext=1, size=4
};

def macroop PUNPCKHDQ_MMX_M {
    ldfp ufp1, seg, sib, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=4
};

def macroop PUNPCKHDQ_MMX_P {
    rdip t7
    ldfp ufp1, seg, riprel, disp, dataSize=8
    unpack mmx, mmx, ufp1, ext=1, size=4
};
'''
bsd-3-clause
-3,750,553,566,638,720,500
31.007813
72
0.721504
false
AOSPA-L/android_external_skia
gm/rebaseline_server/compare_rendered_pictures_test.py
67
4021
#!/usr/bin/python

"""
Copyright 2014 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.

Test compare_rendered_pictures.py

TODO(epoger): Create a command to update the expected results (in
self._output_dir_expected) when appropriate.  For now, you should:
1. examine the results in self._output_dir_actual and make sure they are ok
2. rm -rf self._output_dir_expected
3. mv self._output_dir_actual self._output_dir_expected
Although, if you're using an SVN checkout, this will blow away .svn
directories within self._output_dir_expected, which wouldn't be good...
"""

import os
import subprocess
import sys

# Imports from within Skia
import base_unittest
import compare_rendered_pictures
import results
import gm_json  # must import results first, so that gm_json will be in sys.path


class CompareRenderedPicturesTest(base_unittest.TestCase):

  def test_endToEnd(self):
    """Generate two sets of SKPs, run render_pictures over both, and compare
    the results."""
    self._generate_skps_and_run_render_pictures(
        subdir='before_patch', skpdict={
            'changed.skp': 200,
            'unchanged.skp': 100,
            'only-in-before.skp': 128,
        })
    self._generate_skps_and_run_render_pictures(
        subdir='after_patch', skpdict={
            'changed.skp': 201,
            'unchanged.skp': 100,
            'only-in-after.skp': 128,
        })

    results_obj = compare_rendered_pictures.RenderedPicturesComparisons(
        actuals_root=self._temp_dir,
        subdirs=('before_patch', 'after_patch'),
        generated_images_root=self._temp_dir,
        diff_base_url='/static/generated-images')
    results_obj.get_timestamp = mock_get_timestamp

    gm_json.WriteToFile(
        results_obj.get_packaged_results_of_type(
            results.KEY__HEADER__RESULTS_ALL),
        os.path.join(self._output_dir_actual,
                     'compare_rendered_pictures.json'))

  def _generate_skps_and_run_render_pictures(self, subdir, skpdict):
    """Generate SKPs and run render_pictures on them.

    Args:
      subdir: subdirectory (within self._temp_dir) to write all files into
      skpdict: {skpname: redvalue} dictionary describing the SKP files to
          render
    """
    out_path = os.path.join(self._temp_dir, subdir)
    os.makedirs(out_path)
    for skpname, redvalue in skpdict.iteritems():
      self._run_skpmaker(
          output_path=os.path.join(out_path, skpname), red=redvalue)

    # TODO(epoger): Add --mode tile 256 256 --writeWholeImage to the unittest,
    # and fix its result!  (imageURLs within whole-image entries are wrong when
    # I tried adding that)
    binary = self.find_path_to_program('render_pictures')
    return subprocess.check_output([
        binary,
        '--clone', '1',
        '--config', '8888',
        '-r', out_path,
        '--writeChecksumBasedFilenames',
        '--writeJsonSummaryPath', os.path.join(out_path, 'summary.json'),
        '--writePath', out_path])

  def _run_skpmaker(self, output_path, red=0, green=0, blue=0,
                    width=640, height=400):
    """Runs the skpmaker binary to generate SKP with known characteristics.

    Args:
      output_path: Filepath to write the SKP into.
      red: Value of red color channel in image, 0-255.
      green: Value of green color channel in image, 0-255.
      blue: Value of blue color channel in image, 0-255.
      width: Width of canvas to create.
      height: Height of canvas to create.
    """
    binary = self.find_path_to_program('skpmaker')
    return subprocess.check_output([
        binary,
        '--red', str(red),
        '--green', str(green),
        '--blue', str(blue),
        '--width', str(width),
        '--height', str(height),
        '--writePath', str(output_path)])


def mock_get_timestamp():
  """Mock version of BaseComparisons.get_timestamp() for testing."""
  return 12345678


def main():
  base_unittest.main(CompareRenderedPicturesTest)


if __name__ == '__main__':
  main()
bsd-3-clause
18,097,560,818,985,228
32.508333
80
0.65705
false
quarkonics/zstack-woodpecker
zstackwoodpecker/zstackwoodpecker/operations/deploy_operations.py
1
47036
''' deploy operations for setup zstack database. @author: Youyk ''' import zstackwoodpecker.test_util as test_util import apibinding.api_actions as api_actions import account_operations import resource_operations as res_ops import zstacklib.utils.sizeunit as sizeunit import zstacklib.utils.jsonobject as jsonobject import zstacklib.utils.xmlobject as xmlobject import zstacklib.utils.lock as lock import apibinding.inventory as inventory import sys import traceback import threading import time #global exception information for thread usage exc_info = [] AddKVMHostTimeOut = 10*60*1000 IMAGE_THREAD_LIMIT = 2 DEPLOY_THREAD_LIMIT = 500 def get_first_item_from_list(list_obj, list_obj_name, list_obj_value, action_name): ''' Judge if list is empty. If not, return the 1st item. list_obj: the list for judgment and return; list_obj_name: the list item type name; list_obj_value: the list item's value when do previous query; action_name: which action is calling this function ''' if not isinstance(list_obj, list): raise test_util.TestError("The first parameter is not a [list] type") if not list_obj: raise test_util.TestError("Did not find %s: [%s], when adding %s" % (list_obj_name, list_obj_value, action_name)) if len(list_obj) > 1: raise test_util.TestError("Find more than 1 [%s] resource with name: [%s], when adding %s. Please check your deploy.xml and make sure resource do NOT have duplicated name " % (list_obj_name, list_obj_value, action_name)) return list_obj[0] #Add Backup Storage def add_backup_storage(deployConfig, session_uuid): if xmlobject.has_element(deployConfig, 'backupStorages.sftpBackupStorage'): for bs in xmlobject.safe_list(deployConfig.backupStorages.sftpBackupStorage): action = api_actions.AddSftpBackupStorageAction() action.sessionUuid = session_uuid action.name = bs.name_ action.description = bs.description__ action.url = bs.url_ action.username = bs.username_ action.password = bs.password_ action.hostname = bs.hostname_ action.timeout = AddKVMHostTimeOut #for some platform slowly salt execution action.type = inventory.SFTP_BACKUP_STORAGE_TYPE thread = threading.Thread(target = _thread_for_action, args = (action, )) wait_for_thread_queue() thread.start() if xmlobject.has_element(deployConfig, 'backupStorages.cephBackupStorage'): for bs in xmlobject.safe_list(deployConfig.backupStorages.cephBackupStorage): action = api_actions.AddCephBackupStorageAction() action.sessionUuid = session_uuid action.name = bs.name_ action.description = bs.description__ action.monUrls = bs.monUrls_.split(';') if bs.poolName__: action.poolName = bs.poolName_ action.timeout = AddKVMHostTimeOut #for some platform slowly salt execution action.type = inventory.CEPH_BACKUP_STORAGE_TYPE thread = threading.Thread(target = _thread_for_action, args = (action, )) wait_for_thread_queue() thread.start() if xmlobject.has_element(deployConfig, 'backupStorages.simulatorBackupStorage'): for bs in xmlobject.safe_list(deployConfig.backupStorages.simulatorBackupStorage): action = api_actions.AddSimulatorBackupStorageAction() action.sessionUuid = session_uuid action.name = bs.name_ action.description = bs.description__ action.url = bs.url_ action.type = inventory.SIMULATOR_BACKUP_STORAGE_TYPE action.totalCapacity = sizeunit.get_size(bs.totalCapacity_) action.availableCapacity = sizeunit.get_size(bs.availableCapacity_) thread = threading.Thread(target = _thread_for_action, args = (action, )) wait_for_thread_queue() thread.start() wait_for_thread_done() #Add Zones def add_zone(deployConfig, session_uuid, zone_name = None): def 
_add_zone(zone, zone_duplication): action = api_actions.CreateZoneAction() action.sessionUuid = session_uuid if zone_duplication == 0: action.name = zone.name_ action.description = zone.description__ else: action.name = generate_dup_name(zone.name_, zone_duplication, 'z') action.description = generate_dup_name(zone.description__, zone_duplication, 'zone') try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) zinv = evt.inventory except: exc_info.append(sys.exc_info()) if xmlobject.has_element(zone, 'backupStorageRef'): for ref in xmlobject.safe_list(zone.backupStorageRef): bss = res_ops.get_resource(res_ops.BACKUP_STORAGE, session_uuid, name=ref.text_) bs = get_first_item_from_list(bss, 'Backup Storage', ref.text_, 'attach backup storage to zone') action = api_actions.AttachBackupStorageToZoneAction() action.sessionUuid = session_uuid action.backupStorageUuid = bs.uuid action.zoneUuid = zinv.uuid try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) if not xmlobject.has_element(deployConfig, 'zones.zone'): return for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone_name != zone.name_: continue if zone.duplication__ == None: duplication = 1 else: duplication = int(zone.duplication__) for i in range(duplication): thread = threading.Thread(target=_add_zone, args=(zone, i, )) wait_for_thread_queue() thread.start() wait_for_thread_done() #Add L2 network def add_l2_network(deployConfig, session_uuid, l2_name = None, zone_name = None): ''' If providing name, it will only add L2 network with the same name. ''' if not xmlobject.has_element(deployConfig, "zones.zone"): return def _deploy_l2_network(zone, is_vlan): if is_vlan: if not xmlobject.has_element(zone, "l2Networks.l2VlanNetwork"): return l2Network = zone.l2Networks.l2VlanNetwork else: if not xmlobject.has_element(zone, \ "l2Networks.l2NoVlanNetwork"): return l2Network = zone.l2Networks.l2NoVlanNetwork if zone.duplication__ == None: zone_dup = 1 else: zone_dup = int(zone.duplication__) for zone_ref in range(zone_dup): zoneName = generate_dup_name(zone.name_, zone_ref, 'z') zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, name=zoneName) zinv = get_first_item_from_list(zinvs, 'Zone', zoneName, 'L2 network') #can only deal with single cluster duplication case. 
cluster = xmlobject.safe_list(zone.clusters.cluster)[0] if cluster.duplication__ == None: cluster_duplication = 1 else: cluster_duplication = int(cluster.duplication__) for cluster_ref in range(cluster_duplication): for l2 in xmlobject.safe_list(l2Network): if l2_name and l2_name != l2.name_: continue if not is_vlan or l2.duplication__ == None: l2_dup = 1 else: l2_dup = int(l2.duplication__) for j in range(l2_dup): l2Name = generate_dup_name(\ generate_dup_name(\ generate_dup_name(\ l2.name_, zone_ref, 'z')\ , cluster_ref, 'c')\ , j, 'n') l2Des = generate_dup_name(\ generate_dup_name(\ generate_dup_name(\ l2.description_, zone_ref, 'z')\ , cluster_ref, 'c')\ , j, 'n') if is_vlan: l2_vlan = int(l2.vlan_) + j if is_vlan: action = api_actions.CreateL2VlanNetworkAction() else: action = api_actions.CreateL2NoVlanNetworkAction() action.sessionUuid = session_uuid action.name = l2Name action.description = l2Des action.physicalInterface = l2.physicalInterface_ action.zoneUuid = zinv.uuid if is_vlan: action.vlan = l2_vlan thread = threading.Thread(\ target=_thread_for_action, \ args=(action,)) wait_for_thread_queue() thread.start() for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone.name_ != zone_name: continue _deploy_l2_network(zone, False) _deploy_l2_network(zone, True) wait_for_thread_done() #Add Primary Storage def add_primary_storage(deployConfig, session_uuid, ps_name = None, \ zone_name = None): if not xmlobject.has_element(deployConfig, 'zones.zone'): test_util.test_logger('Not find zones.zone in config, skip primary storage deployment') return def _generate_sim_ps_action(zone, pr, zone_ref, cluster_ref): if zone_ref == 0: zone_name = zone.name_ else: zone_name = generate_dup_name(zone.name_, zone_ref, 'z') zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, name=zone_name) zinv = get_first_item_from_list(zinvs, 'Zone', zone_name, 'primary storage') action = api_actions.AddSimulatorPrimaryStorageAction() action.sessionUuid = session_uuid action.name = generate_dup_name(generate_dup_name(pr.name_, zone_ref, 'z'), cluster_ref, 'c') action.description = generate_dup_name(generate_dup_name(pr.description__, zone_ref, 'zone'), cluster_ref, 'cluster') action.url = generate_dup_name(generate_dup_name(pr.url_, zone_ref, 'z'), cluster_ref, 'c') action.type = inventory.SIMULATOR_PRIMARY_STORAGE_TYPE action.zoneUuid = zinv.uuid action.totalCapacity = sizeunit.get_size(pr.totalCapacity_) action.availableCapacity = sizeunit.get_size(pr.availableCapacity_) return action def _deploy_primary_storage(zone): if xmlobject.has_element(zone, 'primaryStorages.IscsiFileSystemBackendPrimaryStorage'): zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, \ name=zone.name_) zinv = get_first_item_from_list(zinvs, 'Zone', zone.name_, 'primary storage') for pr in xmlobject.safe_list(zone.primaryStorages.IscsiFileSystemBackendPrimaryStorage): if ps_name and ps_name != pr.name_: continue action = api_actions.AddIscsiFileSystemBackendPrimaryStorageAction() action.sessionUuid = session_uuid action.name = pr.name_ action.description = pr.description__ action.type = inventory.ISCSI_FILE_SYSTEM_BACKEND_PRIMARY_STORAGE_TYPE action.url = pr.url_ action.zoneUuid = zinv.uuid action.chapPassword = pr.chapPassword_ action.chapUsername = pr.chapUsername_ action.sshPassword = pr.sshPassword_ action.sshUsername = pr.sshUsername_ action.hostname = pr.hostname_ action.filesystemType = pr.filesystemType_ thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() 
thread.start() if xmlobject.has_element(zone, 'primaryStorages.localPrimaryStorage'): zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, \ name=zone.name_) zinv = get_first_item_from_list(zinvs, 'Zone', zone.name_, 'primary storage') for pr in xmlobject.safe_list(zone.primaryStorages.localPrimaryStorage): if ps_name and ps_name != pr.name_: continue action = api_actions.AddLocalPrimaryStorageAction() action.sessionUuid = session_uuid action.name = pr.name_ action.description = pr.description__ action.type = inventory.LOCAL_STORAGE_TYPE action.url = pr.url_ action.zoneUuid = zinv.uuid thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() thread.start() if xmlobject.has_element(zone, 'primaryStorages.cephPrimaryStorage'): zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, \ name=zone.name_) zinv = get_first_item_from_list(zinvs, 'Zone', zone.name_, 'primary storage') for pr in xmlobject.safe_list(zone.primaryStorages.cephPrimaryStorage): if ps_name and ps_name != pr.name_: continue action = api_actions.AddCephPrimaryStorageAction() action.sessionUuid = session_uuid action.name = pr.name_ action.description = pr.description__ action.type = inventory.CEPH_PRIMARY_STORAGE_TYPE action.monUrls = pr.monUrls_.split(';') if pr.dataVolumePoolName__: action.dataVolumePoolName = pr.dataVolumePoolName__ if pr.rootVolumePoolName__: action.rootVolumePoolName = pr.rootVolumePoolName__ if pr.imageCachePoolName__: action.imageCachePoolName = pr.imageCachePoolName__ action.zoneUuid = zinv.uuid thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() thread.start() if xmlobject.has_element(zone, 'primaryStorages.nfsPrimaryStorage'): zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, \ name=zone.name_) zinv = get_first_item_from_list(zinvs, 'Zone', zone.name_, 'primary storage') for pr in xmlobject.safe_list(zone.primaryStorages.nfsPrimaryStorage): if ps_name and ps_name != pr.name_: continue action = api_actions.AddNfsPrimaryStorageAction() action.sessionUuid = session_uuid action.name = pr.name_ action.description = pr.description__ action.type = inventory.NFS_PRIMARY_STORAGE_TYPE action.url = pr.url_ action.zoneUuid = zinv.uuid thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() thread.start() if xmlobject.has_element(zone, 'primaryStorages.simulatorPrimaryStorage'): if zone.duplication__ == None: duplication = 1 else: duplication = int(zone.duplication__) for pr in xmlobject.safe_list(zone.primaryStorages.simulatorPrimaryStorage): for zone_ref in range(duplication): for cluster in xmlobject.safe_list(zone.clusters.cluster): for pref in xmlobject.safe_list(cluster.primaryStorageRef): if pref.text_ == pr.name_: if cluster.duplication__ == None: cluster_duplication = 1 else: cluster_duplication = int(cluster.duplication__) for cluster_ref in range(cluster_duplication): action = _generate_sim_ps_action(zone, pr, zone_ref, cluster_ref) thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() thread.start() for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone.name_ != zone_name: continue _deploy_primary_storage(zone) wait_for_thread_done() #Add Cluster def add_cluster(deployConfig, session_uuid, cluster_name = None, \ zone_name = None): if not xmlobject.has_element(deployConfig, "zones.zone"): return def _add_cluster(action, zone_ref, cluster, cluster_ref): evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) 
cinv = evt.inventory try: if xmlobject.has_element(cluster, 'primaryStorageRef'): for pref in xmlobject.safe_list(cluster.primaryStorageRef): ps_name = generate_dup_name(generate_dup_name(pref.text_, zone_ref, 'z'), cluster_ref, 'c') pinvs = res_ops.get_resource(res_ops.PRIMARY_STORAGE, session_uuid, name=ps_name) pinv = get_first_item_from_list(pinvs, 'Primary Storage', ps_name, 'Cluster') action_ps = api_actions.AttachPrimaryStorageToClusterAction() action_ps.sessionUuid = session_uuid action_ps.clusterUuid = cinv.uuid action_ps.primaryStorageUuid = pinv.uuid evt = action_ps.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) if cluster.allL2NetworkRef__ == 'true': #find all L2 network in zone and attach to cluster cond = res_ops.gen_query_conditions('zoneUuid', '=', \ action.zoneUuid) l2_count = res_ops.query_resource_count(res_ops.L2_NETWORK, \ cond, session_uuid) l2invs = res_ops.query_resource_fields(res_ops.L2_NETWORK, \ [{'name':'zoneUuid', 'op':'=', 'value':action.zoneUuid}], \ session_uuid, ['uuid'], 0, l2_count) else: l2invs = [] if xmlobject.has_element(cluster, 'l2NetworkRef'): for l2ref in xmlobject.safe_list(cluster.l2NetworkRef): l2_name = generate_dup_name(generate_dup_name(l2ref.text_, zone_ref, 'z'), cluster_ref, 'c') cond = res_ops.gen_query_conditions('zoneUuid', '=', \ action.zoneUuid) cond = res_ops.gen_query_conditions('name', '=', l2_name, \ cond) l2inv = res_ops.query_resource_fields(res_ops.L2_NETWORK, \ cond, session_uuid, ['uuid']) if not l2inv: raise test_util.TestError("Can't find l2 network [%s] in database." % l2_name) l2invs.extend(l2inv) for l2inv in l2invs: action = api_actions.AttachL2NetworkToClusterAction() action.sessionUuid = session_uuid action.clusterUuid = cinv.uuid action.l2NetworkUuid = l2inv.uuid thread = threading.Thread(target=_thread_for_action, args=(action,)) wait_for_thread_queue() thread.start() def _deploy_cluster(zone): if not xmlobject.has_element(zone, "clusters.cluster"): return if zone.duplication__ == None: zone_duplication = 1 else: zone_duplication = int(zone.duplication__) for zone_ref in range(zone_duplication): for cluster in xmlobject.safe_list(zone.clusters.cluster): if cluster_name and cluster_name != cluster.name_: continue if cluster.duplication__ == None: cluster_duplication = 1 else: cluster_duplication = int(cluster.duplication__) for cluster_ref in range(cluster_duplication): action = api_actions.CreateClusterAction() action.sessionUuid = session_uuid action.name = generate_dup_name(generate_dup_name(cluster.name_, zone_ref, 'z'), cluster_ref, 'c') action.description = generate_dup_name(generate_dup_name(cluster.description__, zone_ref, 'z'), cluster_ref, 'c') action.hypervisorType = cluster.hypervisorType_ zone_name = generate_dup_name(zone.name_, zone_ref, 'z') zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, name=zone_name) zinv = get_first_item_from_list(zinvs, 'Zone', zone_name, 'Cluster') action.zoneUuid = zinv.uuid thread = threading.Thread(target=_add_cluster, args=(action, zone_ref, cluster, cluster_ref, )) wait_for_thread_queue() thread.start() for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone_name != zone.name_: continue _deploy_cluster(zone) wait_for_thread_done() #Add Host def add_host(deployConfig, session_uuid, host_ip = None, zone_name = None, \ cluster_name = None): ''' Base on an xml deploy config object to add hosts. If providing giving zone_name, cluster_name or host_ip, this function will only add related hosts. 
''' if not xmlobject.has_element(deployConfig, "zones.zone"): return def _deploy_host(cluster, zone_ref, cluster_ref): if not xmlobject.has_element(cluster, "hosts.host"): return if zone_ref == 0 and cluster_ref == 0: cluster_name = cluster.name_ else: cluster_name = generate_dup_name(generate_dup_name(cluster.name_, zone_ref, 'z'), cluster_ref, 'c') cinvs = res_ops.get_resource(res_ops.CLUSTER, session_uuid, name=cluster_name) cinv = get_first_item_from_list(cinvs, 'Cluster', cluster_name, 'L3 network') for host in xmlobject.safe_list(cluster.hosts.host): if host_ip and host_ip != host.managementIp_: continue if host.duplication__ == None: host_duplication = 1 else: host_duplication = int(host.duplication__) for i in range(host_duplication): if cluster.hypervisorType_ == inventory.KVM_HYPERVISOR_TYPE: action = api_actions.AddKVMHostAction() action.username = host.username_ action.password = host.password_ action.timeout = AddKVMHostTimeOut elif cluster.hypervisorType_ == inventory.SIMULATOR_HYPERVISOR_TYPE: action = api_actions.AddSimulatorHostAction() action.cpuCapacity = host.cpuCapacity_ action.memoryCapacity = sizeunit.get_size(host.memoryCapacity_) action.sessionUuid = session_uuid action.clusterUuid = cinv.uuid action.hostTags = host.hostTags__ if zone_ref == 0 and cluster_ref == 0 and i == 0: action.name = host.name_ action.description = host.description__ action.managementIp = host.managementIp_ else: action.name = generate_dup_name(generate_dup_name(generate_dup_name(host.name_, zone_ref, 'z'), cluster_ref, 'c'), i, 'h') action.description = generate_dup_name(generate_dup_name(generate_dup_name(host.description__, zone_ref, 'z'), cluster_ref, 'c'), i, 'h') action.managementIp = generate_dup_host_ip(host.managementIp_, zone_ref, cluster_ref, i) thread = threading.Thread(target=_thread_for_action, args = (action, )) wait_for_thread_queue() thread.start() for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone_name != zone.name_: continue if not xmlobject.has_element(zone, 'clusters.cluster'): continue if zone.duplication__ == None: zone_duplication = 1 else: zone_duplication = int(zone.duplication__) for zone_ref in range(zone_duplication): for cluster in xmlobject.safe_list(zone.clusters.cluster): if cluster_name and cluster_name != cluster.name_: continue if cluster.duplication__ == None: cluster_duplication = 1 else: cluster_duplication = int(cluster.duplication__) for cluster_ref in range(cluster_duplication): _deploy_host(cluster, zone_ref, cluster_ref) wait_for_thread_done() test_util.test_logger('All add KVM host actions are done.') #Add L3 network def add_l3_network(deployConfig, session_uuid, l3_name = None, l2_name = None, \ zone_name = None): ''' add_l3_network will add L3 network and also add related DNS, IpRange and network services. 
''' if not xmlobject.has_element(deployConfig, "zones.zone"): return def _deploy_l3_network(l2, zone_ref, cluster_ref): if not xmlobject.has_element(l2, "l3Networks.l3BasicNetwork"): return if not l2.duplication__: l2_dup = 1 else: l2_dup = int(l2.duplication__) for l2_num in range(l2_dup): for l3 in xmlobject.safe_list(l2.l3Networks.l3BasicNetwork): if l3_name and l3_name != l3.name_: continue l2Name = generate_dup_name(generate_dup_name(generate_dup_name(l2.name_, zone_ref, 'z'), cluster_ref, 'c'), l2_num, 'n') l3Name = generate_dup_name(generate_dup_name(generate_dup_name(l3.name_, zone_ref, 'z'), cluster_ref, 'c'), l2_num, 'n') l2invs = res_ops.get_resource(res_ops.L2_NETWORK, \ session_uuid, \ name=l2Name) l2inv = get_first_item_from_list(l2invs, \ 'L2 Network', l2Name, 'L3 Network') thread = threading.Thread(target=_do_l3_deploy, \ args=(l3, l2inv.uuid, l3Name, session_uuid, )) wait_for_thread_queue() thread.start() def _do_l3_deploy(l3, l2inv_uuid, l3Name, session_uuid): action = api_actions.CreateL3NetworkAction() action.sessionUuid = session_uuid action.description = l3.description__ if l3.system__ and l3.system__ != 'False': action.system = 'true' action.l2NetworkUuid = l2inv_uuid action.name = l3Name action.type = inventory.L3_BASIC_NETWORK_TYPE if l3.domain_name__: action.dnsDomain = l3.domain_name__ try: evt = action.run() except: exc_info.append(sys.exc_info()) test_util.test_logger(jsonobject.dumps(evt)) l3_inv = evt.inventory #add dns if xmlobject.has_element(l3, 'dns'): for dns in xmlobject.safe_list(l3.dns): action = api_actions.AddDnsToL3NetworkAction() action.sessionUuid = session_uuid action.dns = dns.text_ action.l3NetworkUuid = l3_inv.uuid try: evt = action.run() except: exc_info.append(sys.exc_info()) test_util.test_logger(jsonobject.dumps(evt)) #add ip range. if xmlobject.has_element(l3, 'ipRange'): do_add_ip_range(l3.ipRange, l3_inv.uuid, session_uuid) #add network service. providers = {} action = api_actions.QueryNetworkServiceProviderAction() action.sessionUuid = session_uuid action.conditions = [] try: reply = action.run() except: exc_info.append(sys.exc_info()) for pinv in reply: providers[pinv.name] = pinv.uuid if xmlobject.has_element(l3, 'networkService'): do_add_network_service(l3.networkService, l3_inv.uuid, \ providers, session_uuid) for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone_name != zone.name_: continue l2networks = [] if xmlobject.has_element(zone, 'l2Networks.l2NoVlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2NoVlanNetwork)) if xmlobject.has_element(zone, 'l2Networks.l2VlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2VlanNetwork)) for l2 in l2networks: if l2_name and l2_name != l2.name_: continue if zone.duplication__ == None: duplication = 1 else: duplication = int(zone.duplication__) for zone_ref in range(duplication): for cluster in xmlobject.safe_list(zone.clusters.cluster): if cluster.duplication__ == None: cluster_duplication = 1 else: cluster_duplication = int(cluster.duplication__) for cluster_ref in range(cluster_duplication): if zone_ref == 1 and cluster_ref == 1: zone_ref = 0 cluster_ref = 0 _deploy_l3_network(l2, zone_ref, cluster_ref) wait_for_thread_done() test_util.test_logger('All add L3 Network actions are done.') #Add Iprange def add_ip_range(deployConfig, session_uuid, ip_range_name = None, \ zone_name= None, l3_name = None): ''' Call by only adding an IP range. If the IP range is in L3 config, add_l3_network will add ip range direclty. 
deployConfig is a xmlobject. If using standard net_operation, please check net_operations.add_ip_range(test_util.IpRangeOption()) ''' if not xmlobject.has_element(deployConfig, "zones.zone"): return l3networks = [] for zone in xmlobject.safe_list(deployConfig.zones.zone): if zone_name and zone_name != zone.name_: continue l2networks = [] if xmlobject.has_element(zone, 'l2Networks.l2NoVlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2NoVlanNetwork)) if xmlobject.has_element(zone, 'l2Networks.l2VlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2VlanNetwork)) for l2 in l2networks: if xmlobject.has_element(l2, 'l3Networks.l3BasicNetwork'): l3networks.extend(xmlobject.safe_list(l2.l3Networks.l3BasicNetwork)) if zone.duplication__ == None: duplication = 1 else: duplication = int(zone.duplication__) for zone_duplication in range(duplication): for l3 in l3networks: if l3_name and l3_name != l3.name_: continue if not xmlobject.has_element(l3, 'ipRange'): continue if zone_duplication == 0: l3Name = l3.name_ else: l3Name = generate_dup_name(l3.name_, zone_duplication, 'z') l3_invs = res_ops.get_resource(res_ops.L3_NETWORK, session_uuid, name = l3Name) l3_inv = get_first_item_from_list(l3_invs, 'L3 Network', l3Name, 'IP range') do_add_ip_range(l3.ipRange, l3_inv.uuid, session_uuid, \ ip_range_name) def do_add_ip_range(ip_range_xml_obj, l3_uuid, session_uuid, \ ip_range_name = None): for ir in xmlobject.safe_list(ip_range_xml_obj): if ip_range_name and ip_range_name != ir.name_: continue action = api_actions.AddIpRangeAction() action.sessionUuid = session_uuid action.description = ir.description__ action.endIp = ir.endIp_ action.gateway = ir.gateway_ action.l3NetworkUuid = l3_uuid action.name = ir.name_ action.netmask = ir.netmask_ action.startIp = ir.startIp_ try: evt = action.run() except Exception as e: exc_info.append(sys.exc_info()) raise e test_util.test_logger(jsonobject.dumps(evt)) #Add Network Service def add_network_service(deployConfig, session_uuid): if not xmlobject.has_element(deployConfig, "zones.zone"): return l3networks = [] for zone in xmlobject.safe_list(deployConfig.zones.zone): l2networks = [] if xmlobject.has_element(zone, 'l2Networks.l2NoVlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2NoVlanNetwork)) if xmlobject.has_element(zone, 'l2Networks.l2VlanNetwork'): l2networks.extend(xmlobject.safe_list(zone.l2Networks.l2VlanNetwork)) for l2 in l2networks: if xmlobject.has_element(l2, 'l3Networks.l3BasicNetwork'): l3networks.extend(xmlobject.safe_list(l2.l3Networks.l3BasicNetwork)) providers = {} action = api_actions.QueryNetworkServiceProviderAction() action.sessionUuid = session_uuid action.conditions = [] try: reply = action.run() except Exception as e: exc_info.append(sys.exc_info()) raise e for pinv in reply: providers[pinv.name] = pinv.uuid if zone.duplication__ == None: duplication = 1 else: duplication = int(zone.duplication__) for zone_duplication in range(duplication): for l3 in l3networks: if not xmlobject.has_element(l3, 'networkService'): continue if zone_duplication == 0: l3_name = l3.name_ else: l3_name = generate_dup_name(l3.name_, zone_duplication, 'z') l3_invs = res_ops.get_resource(res_ops.L3_NETWORK, session_uuid, name = l3_name) l3_inv = get_first_item_from_list(l3_invs, 'L3 Network', l3_name, 'Network Service') do_add_network_service(l3.networkService, l3_inv.uuid, \ providers, session_uuid) def do_add_network_service(net_service_xml_obj, l3_uuid, providers, \ session_uuid): allservices = {} for ns in 
xmlobject.safe_list(net_service_xml_obj): puuid = providers.get(ns.provider_) if not puuid: raise test_util.TestError('cannot find network service provider[%s], it may not have been added' % ns.provider_) servs = [] for nst in xmlobject.safe_list(ns.serviceType): servs.append(nst.text_) allservices[puuid] = servs action = api_actions.AttachNetworkServiceToL3NetworkAction() action.sessionUuid = session_uuid action.l3NetworkUuid = l3_uuid action.networkServices = allservices try: evt = action.run() except Exception as e: exc_info.append(sys.exc_info()) raise e test_util.test_logger(jsonobject.dumps(evt)) #Add Image def add_image(deployConfig, session_uuid): def _add_image(action): increase_image_thread() try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) finally: decrease_image_thread() if not xmlobject.has_element(deployConfig, 'images.image'): return for i in xmlobject.safe_list(deployConfig.images.image): for bsref in xmlobject.safe_list(i.backupStorageRef): bss = res_ops.get_resource(res_ops.BACKUP_STORAGE, session_uuid, name=bsref.text_) bs = get_first_item_from_list(bss, 'backup storage', bsref.text_, 'image') action = api_actions.AddImageAction() action.sessionUuid = session_uuid #TODO: account uuid will be removed later. action.accountUuid = inventory.INITIAL_SYSTEM_ADMIN_UUID action.backupStorageUuids = [bs.uuid] action.bits = i.bits__ if not action.bits: action.bits = 64 action.description = i.description__ action.format = i.format_ action.mediaType = i.mediaType_ action.guestOsType = i.guestOsType__ if not action.guestOsType: action.guestOsType = 'unknown' action.hypervisorType = i.hypervisorType__ action.name = i.name_ action.url = i.url_ action.timeout = 1800000 thread = threading.Thread(target = _add_image, args = (action, )) print 'before add image1: %s' % i.url_ wait_for_image_thread_queue() print 'before add image2: %s' % i.url_ thread.start() print 'add image: %s' % i.url_ print 'all images add command are executed' wait_for_thread_done(True) print 'all images have been added' #Add Disk Offering def add_disk_offering(deployConfig, session_uuid): def _add_disk_offering(disk_offering_xml_obj, session_uuid): action = api_actions.CreateDiskOfferingAction() action.sessionUuid = session_uuid action.name = disk_offering_xml_obj.name_ action.description = disk_offering_xml_obj.description_ action.diskSize = sizeunit.get_size(disk_offering_xml_obj.diskSize_) try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) if not xmlobject.has_element(deployConfig, 'diskOfferings.diskOffering'): return for disk_offering_xml_obj in \ xmlobject.safe_list(deployConfig.diskOfferings.diskOffering): thread = threading.Thread(target = _add_disk_offering, \ args = (disk_offering_xml_obj, session_uuid)) wait_for_thread_queue() thread.start() wait_for_thread_done() #Add Instance Offering def add_instance_offering(deployConfig, session_uuid): def _add_io(instance_offering_xml_obj, session_uuid): action = api_actions.CreateInstanceOfferingAction() action.sessionUuid = session_uuid action.name = instance_offering_xml_obj.name_ action.description = instance_offering_xml_obj.description__ action.cpuNum = instance_offering_xml_obj.cpuNum_ action.cpuSpeed = instance_offering_xml_obj.cpuSpeed_ if instance_offering_xml_obj.memorySize__: action.memorySize = sizeunit.get_size(instance_offering_xml_obj.memorySize_) elif instance_offering_xml_obj.memoryCapacity_: action.memorySize = 
sizeunit.get_size(instance_offering_xml_obj.memoryCapacity_) try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) if not xmlobject.has_element(deployConfig, \ 'instanceOfferings.instanceOffering'): return for instance_offering_xml_obj in \ xmlobject.safe_list(deployConfig.instanceOfferings.instanceOffering): thread = threading.Thread(target = _add_io, \ args = (instance_offering_xml_obj, session_uuid, )) wait_for_thread_queue() thread.start() wait_for_thread_done() #Add VM -- Pass def _thread_for_action(action): try: evt = action.run() test_util.test_logger(jsonobject.dumps(evt)) except: exc_info.append(sys.exc_info()) #Add Virtual Router Offering def add_virtual_router(deployConfig, session_uuid, l3_name = None, \ zone_name = None): if not xmlobject.has_element(deployConfig, 'instanceOfferings.virtualRouterOffering'): return for i in xmlobject.safe_list(deployConfig.instanceOfferings.virtualRouterOffering): if l3_name and l3_name != i.managementL3NetworkRef.text_: continue if zone_name and zone_name != i.zoneRef.text_: continue print "continue l3_name: %s; zone_name: %s" % (l3_name, zone_name) action = api_actions.CreateVirtualRouterOfferingAction() action.sessionUuid = session_uuid action.name = i.name_ action.description = i.description__ action.cpuNum = i.cpuNum_ action.cpuSpeed = i.cpuSpeed_ if i.memorySize__: action.memorySize = sizeunit.get_size(i.memorySize_) elif i.memoryCapacity_: action.memorySize = sizeunit.get_size(i.memoryCapacity_) action.isDefault = i.isDefault__ action.type = 'VirtualRouter' zinvs = res_ops.get_resource(res_ops.ZONE, session_uuid, name=i.zoneRef.text_) zinv = get_first_item_from_list(zinvs, 'zone', i.zoneRef.text_, 'virtual router offering') action.zoneUuid = zinv.uuid cond = res_ops.gen_query_conditions('zoneUuid', '=', zinv.uuid) cond1 = res_ops.gen_query_conditions('name', '=', \ i.managementL3NetworkRef.text_, cond) minvs = res_ops.query_resource(res_ops.L3_NETWORK, cond1, \ session_uuid) minv = get_first_item_from_list(minvs, 'Management L3 Network', i.managementL3NetworkRef.text_, 'virtualRouterOffering') action.managementNetworkUuid = minv.uuid if xmlobject.has_element(i, 'publicL3NetworkRef'): cond1 = res_ops.gen_query_conditions('name', '=', \ i.publicL3NetworkRef.text_, cond) pinvs = res_ops.query_resource(res_ops.L3_NETWORK, cond1, \ session_uuid) pinv = get_first_item_from_list(pinvs, 'Public L3 Network', i.publicL3NetworkRef.text_, 'virtualRouterOffering') action.publicNetworkUuid = pinv.uuid iinvs = res_ops.get_resource(res_ops.IMAGE, session_uuid, \ name=i.imageRef.text_) iinv = get_first_item_from_list(iinvs, 'Image', i.imageRef.text_, 'virtualRouterOffering') action.imageUuid = iinv.uuid thread = threading.Thread(target = _thread_for_action, args = (action, )) wait_for_thread_queue() thread.start() wait_for_thread_done() def deploy_initial_database(deploy_config): operations = [ add_backup_storage, add_zone, add_l2_network, add_primary_storage, add_cluster, add_host, add_l3_network, add_image, add_disk_offering, add_instance_offering, add_virtual_router ] for operation in operations: session_uuid = account_operations.login_as_admin() try: operation(deploy_config, session_uuid) except Exception as e: test_util.test_logger('[Error] zstack deployment meets exception when doing: %s . The real exception are:.' 
% operation.__name__) print('----------------------Exception Reason------------------------') traceback.print_exc(file=sys.stdout) print('-------------------------Reason End---------------------------\n') raise e finally: account_operations.logout(session_uuid) test_util.test_logger('[Done] zstack initial database was created successfully.') def generate_dup_name(origin_name, num, prefix=None): if num == 0: return origin_name if prefix: return str(origin_name) + '-' + str(prefix) + str(num) else: return str(origin_name) + '-' + str(num) def generate_dup_host_ip(origin_ip, zone_ref, cluster_ref, host_ref): ip_fields = origin_ip.split('.') ip_fields[1] = str(int(ip_fields[1]) + zone_ref) ip_fields[2] = str(int(ip_fields[2]) + cluster_ref) ip_fields[3] = str(int(ip_fields[3]) + host_ref) return '.'.join(ip_fields) image_thread_queue = 0 @lock.lock('image_thread') def increase_image_thread(): global image_thread_queue image_thread_queue += 1 @lock.lock('image_thread') def decrease_image_thread(): global image_thread_queue image_thread_queue -= 1 def wait_for_image_thread_queue(): while image_thread_queue >= IMAGE_THREAD_LIMIT: time.sleep(1) print 'image_thread_queue: %d' % image_thread_queue def wait_for_thread_queue(): while threading.active_count() > DEPLOY_THREAD_LIMIT: check_thread_exception() time.sleep(1) def cleanup_exc_info(): exc_info = [] def check_thread_exception(): if exc_info: info1 = exc_info[0][1] info2 = exc_info[0][2] cleanup_exc_info() raise info1, None, info2 def wait_for_thread_done(report = False): while threading.active_count() > 1: check_thread_exception() time.sleep(1) if report: print 'thread count: %d' % threading.active_count() check_thread_exception()
apache-2.0
-89,130,814,888,051,550
39.698582
228
0.566268
false
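As an aside on the deployment script in the entry above: duplicated zones, clusters and hosts get their names and management IPs derived from a base value plus zone/cluster/host indices (the generate_dup_name and generate_dup_host_ip helpers near the end of that file). A minimal standalone sketch of that naming scheme, with made-up sample inputs:

def generate_dup_name(origin_name, num, prefix=None):
    # Index 0 keeps the original name; higher indices get a '-<prefix><num>' suffix.
    if num == 0:
        return origin_name
    if prefix:
        return '%s-%s%s' % (origin_name, prefix, num)
    return '%s-%s' % (origin_name, num)

def generate_dup_host_ip(origin_ip, zone_ref, cluster_ref, host_ref):
    # Offset the 2nd/3rd/4th octets by the zone, cluster and host indices.
    fields = origin_ip.split('.')
    fields[1] = str(int(fields[1]) + zone_ref)
    fields[2] = str(int(fields[2]) + cluster_ref)
    fields[3] = str(int(fields[3]) + host_ref)
    return '.'.join(fields)

# Hypothetical sample values, for illustration only.
print(generate_dup_name(generate_dup_name('host1', 1, 'z'), 2, 'c'))  # host1-z1-c2
print(generate_dup_host_ip('10.0.1.5', 1, 2, 3))                      # 10.1.3.8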
nooperpudd/pulsar
examples/httpbin/manage.py
5
12153
'''Pulsar HTTP test application:: python manage.py Implementation ====================== .. autoclass:: HttpBin :members: :member-order: bysource Server Hooks =================== This example shows how to use :ref:`server hooks <setting-section-application-hooks>` to log each request .. automodule:: examples.httpbin.config :members: ''' import os import sys import string import mimetypes from itertools import repeat, chain from random import random from pulsar import HttpRedirect, HttpException, version, JAPANESE, CHINESE from pulsar.utils.httpurl import (Headers, ENCODE_URL_METHODS, ENCODE_BODY_METHODS) from pulsar.utils.html import escape from pulsar.apps import wsgi, ws from pulsar.apps.wsgi import (route, Html, Json, HtmlDocument, GZipMiddleware, AsyncString) from pulsar.utils.structures import MultiValueDict from pulsar.utils.system import json METHODS = frozenset(chain((m.lower() for m in ENCODE_URL_METHODS), (m.lower() for m in ENCODE_BODY_METHODS))) pyversion = '.'.join(map(str, sys.version_info[:3])) ASSET_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets') FAVICON = os.path.join(ASSET_DIR, 'favicon.ico') characters = string.ascii_letters + string.digits def asset(name, mode='r', chunk_size=None): name = os.path.join(ASSET_DIR, name) if os.path.isfile(name): content_type, encoding = mimetypes.guess_type(name) if chunk_size: def _chunks(): with open(name, mode) as file: while True: data = file.read(chunk_size) if not data: break yield data data = _chunks() else: with open(name, mode) as file: data = file.read() return data, content_type, encoding class BaseRouter(wsgi.Router): ######################################################################## # INTERNALS def bind_server_event(self, request, event, handler): consumer = request.environ['pulsar.connection'].current_consumer() consumer.bind_event(event, handler) def info_data_response(self, request, **params): data = self.info_data(request, **params) return Json(data).http_response(request) def info_data(self, request, **params): headers = self.getheaders(request) data = {'method': request.method, 'headers': headers, 'pulsar': self.pulsar_info(request)} if request.method in ENCODE_URL_METHODS: data['args'] = dict(request.url_data) else: args, files = request.data_and_files() jfiles = MultiValueDict() for name, parts in files.lists(): for part in parts: try: part = part.string() except UnicodeError: part = part.base64() jfiles[name] = part data.update((('args', dict(args)), ('files', dict(jfiles)))) data.update(params) return data def getheaders(self, request): headers = Headers(kind='client') for k in request.environ: if k.startswith('HTTP_'): headers[k[5:].replace('_', '-')] = request.environ[k] return dict(headers) def pulsar_info(self, request): return request.get('pulsar.connection').info() class HttpBin(BaseRouter): '''The main :class:`.Router` for the HttpBin application ''' def get(self, request): '''The home page of this router''' ul = Html('ul') for router in sorted(self.routes, key=lambda r: r.creation_count): a = router.link(escape(router.route.path)) a.addClass(router.name) for method in METHODS: if router.getparam(method): a.addClass(method) li = Html('li', a, ' %s' % router.getparam('title', '')) ul.append(li) title = 'Pulsar' html = request.html_document html.head.title = title html.head.links.append('httpbin.css') html.head.links.append('favicon.ico', rel="icon", type='image/x-icon') html.head.scripts.append('httpbin.js') ul = ul.render(request) templ, _, _ = asset('template.html') body = 
templ % (title, JAPANESE, CHINESE, version, pyversion, ul) html.body.append(body) return html.http_response(request) @route(title='Returns GET data') def get_get(self, request): return self.info_data_response(request) @route(title='Returns POST data') def post_post(self, request): return self.info_data_response(request) @route(title='Returns PATCH data') def patch_patch(self, request): return self.info_data_response(request) @route(title='Returns PUT data') def put_put(self, request): return self.info_data_response(request) @route(title='Returns DELETE data') def delete_delete(self, request): return self.info_data_response(request) @route('redirect/<int(min=1,max=10):times>', defaults={'times': 5}, title='302 Redirect n times') def redirect(self, request): num = request.urlargs['times'] - 1 if num: raise HttpRedirect('/redirect/%s' % num) else: raise HttpRedirect('/get') @route('getsize/<int(min=1,max=8388608):size>', defaults={'size': 150000}, title='Returns a preset size of data (limit at 8MB)') def getsize(self, request): size = request.urlargs['size'] data = {'size': size, 'data': 'd' * size} return self.info_data_response(request, **data) @route(title='Returns gzip encoded data') def gzip(self, request): response = self.info_data_response(request, gzipped=True) return GZipMiddleware(10)(request.environ, response) @route(title='Returns cookie data') def cookies(self, request): cookies = request.cookies d = dict(((c.key, c.value) for c in cookies.values())) return Json({'cookies': d}).http_response(request) @route('cookies/set/<name>/<value>', title='Sets a simple cookie', defaults={'name': 'package', 'value': 'pulsar'}) def request_cookies_set(self, request): key = request.urlargs['name'] value = request.urlargs['value'] request.response.set_cookie(key, value=value) request.response.status_code = 302 request.response.headers['location'] = '/cookies' return request.response @route('status/<int(min=100,max=505):status>', title='Returns given HTTP Status code', defaults={'status': 418}) def status(self, request): request.response.content_type = 'text/html' raise HttpException(status=request.urlargs['status']) @route(title='Returns response headers') def response_headers(self, request): class Gen: headers = None def __call__(self, server, **kw): self.headers = server.headers def generate(self): # yield a byte so that headers are sent yield b'' # we must have the headers now yield json.dumps(dict(self.headers)) gen = Gen() self.bind_server_event(request, 'on_headers', gen) request.response.content = gen.generate() request.response.content_type = 'application/json' return request.response @route('basic-auth/<username>/<password>', title='Challenges HTTPBasic Auth', defaults={'username': 'username', 'password': 'password'}) def challenge_auth(self, request): auth = request.get('http.authorization') if auth and auth.authenticated(request.environ, **request.urlargs): return Json({'authenticated': True, 'username': auth.username}).http_response(request) raise wsgi.HttpAuthenticate('basic') @route('digest-auth/<username>/<password>/<qop>', title='Challenges HTTP Digest Auth', defaults={'username': 'username', 'password': 'password', 'qop': 'auth'}) def challenge_digest_auth(self, request): auth = request.get('http.authorization') if auth and auth.authenticated(request.environ, **request.urlargs): return Json({'authenticated': True, 'username': auth.username}).http_response(request) raise wsgi.HttpAuthenticate('digest', qop=[request.urlargs['qop']]) @route('stream/<int(min=1):m>/<int(min=1):n>', 
title='Stream m chunk of data n times', defaults={'m': 300, 'n': 20}) def request_stream(self, request): m = request.urlargs['m'] n = request.urlargs['n'] request.response.content_type = 'text/plain' request.response.content = repeat(b'a' * m, n) return request.response @route(title='A web socket graph') def websocket(self, request): data = open(os.path.join(os.path.dirname(__file__), 'assets', 'websocket.html')).read() scheme = 'wss' if request.is_secure else 'ws' host = request.get('HTTP_HOST') data = data % {'address': '%s://%s/graph-data' % (scheme, host)} request.response.content_type = 'text/html' request.response.content = data return request.response @route(title='Live server statistics') def stats(self, request): '''Live stats for the server. Try sending lots of requests ''' # scheme = 'wss' if request.is_secure else 'ws' # host = request.get('HTTP_HOST') # address = '%s://%s/stats' % (scheme, host) doc = HtmlDocument(title='Live server stats', media_path='/assets/') # docs.head.scripts return doc.http_response(request) @route('clip/<int(min=256,max=16777216):chunk_size>', defaults={'chunk_size': 4096}, title='Show a video clip') def clip(self, request): c = request.urlargs['chunk_size'] data, ct, encoding = asset('clip.mp4', 'rb', chunk_size=c) response = request.response response.content_type = ct response.encoding = encoding response.content = data return response ######################################################################## # BENCHMARK ROUTES @route() def json(self, request): return Json({'message': "Hello, World!"}).http_response(request) @route() def plaintext(self, request): return AsyncString('Hello, World!').http_response(request) class ExpectFail(BaseRouter): def post(self, request): chunk = request.get('wsgi.input') if not chunk.done(): chunk.fail() else: return self.info_data_response(request) class Graph(ws.WS): def on_message(self, websocket, msg): websocket.write(json.dumps([(i, random()) for i in range(100)])) class Site(wsgi.LazyWsgi): def setup(self, environ): router = HttpBin('/') return wsgi.WsgiHandler([ExpectFail('expect'), wsgi.wait_for_body_middleware, wsgi.clean_path_middleware, wsgi.authorization_middleware, wsgi.MediaRouter('media', ASSET_DIR, show_indexes=True), ws.WebSocket('/graph-data', Graph()), router], async=True) def server(description=None, **kwargs): description = description or 'Pulsar HttpBin' return wsgi.WSGIServer(Site(), description=description, **kwargs) if __name__ == '__main__': # pragma nocover server().start()
bsd-3-clause
-6,857,042,363,070,801,000
35.277612
78
0.570888
false
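The asset() helper in the pulsar httpbin example above either reads a file whole or hands back a generator of fixed-size chunks (the /clip route uses the chunked form to stream video). A minimal standalone sketch of that chunked-read pattern; the file path in the usage comment is just a placeholder:

def read_in_chunks(path, chunk_size=4096, mode='rb'):
    # Generator: yield the file in chunk_size pieces until EOF.
    with open(path, mode) as fh:
        while True:
            data = fh.read(chunk_size)
            if not data:
                break
            yield data

# Hypothetical usage:
# total = sum(len(chunk) for chunk in read_in_chunks('clip.mp4', 64 * 1024))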
s20121035/rk3288_android5.1_repo
external/lldb/test/functionalities/inferior-changed/TestInferiorChanged.py
2
3689
"""Test lldb reloads the inferior after it was changed during the session.""" import os, time import unittest2 import lldb from lldbtest import * import lldbutil class ChangedInferiorTestCase(TestBase): mydir = os.path.join("functionalities", "inferior-changed") @unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin") def test_inferior_crashing_dsym(self): """Test lldb reloads the inferior after it was changed during the session.""" self.buildDsym() self.inferior_crashing() self.cleanup() d = {'C_SOURCES': 'main2.c'} self.buildDsym(dictionary=d) self.setTearDownCleanup(dictionary=d) self.inferior_not_crashing() def test_inferior_crashing_dwarf(self): """Test lldb reloads the inferior after it was changed during the session.""" self.buildDwarf() self.inferior_crashing() self.cleanup() # lldb needs to recognize the inferior has changed. If lldb needs to check the # new module timestamp, make sure it is not the same as the old one, so add a # 1 second delay. time.sleep(1) d = {'C_SOURCES': 'main2.c'} self.buildDwarf(dictionary=d) self.setTearDownCleanup(dictionary=d) self.inferior_not_crashing() def setUp(self): # Call super's setUp(). TestBase.setUp(self) # Find the line number of the crash. self.line1 = line_number('main.c', '// Crash here.') self.line2 = line_number('main2.c', '// Not crash here.') def inferior_crashing(self): """Inferior crashes upon launching; lldb should catch the event and stop.""" exe = os.path.join(os.getcwd(), "a.out") self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET) self.runCmd("run", RUN_SUCCEEDED) if sys.platform.startswith("darwin"): stop_reason = 'stop reason = EXC_BAD_ACCESS' else: stop_reason = 'stop reason = invalid address' # The stop reason of the thread should be a bad access exception. self.expect("thread list", STOPPED_DUE_TO_EXC_BAD_ACCESS, substrs = ['stopped', stop_reason]) # And it should report the correct line number. self.expect("thread backtrace all", substrs = [stop_reason, 'main.c:%d' % self.line1]) def inferior_not_crashing(self): """Test lldb reloads the inferior after it was changed during the session.""" self.runCmd("process kill") self.runCmd("run", RUN_SUCCEEDED) self.runCmd("process status") if sys.platform.startswith("darwin"): stop_reason = 'EXC_BAD_ACCESS' else: stop_reason = 'invalid address' if stop_reason in self.res.GetOutput(): self.fail("Inferior changed, but lldb did not perform a reload") # Break inside the main. lldbutil.run_break_set_by_file_and_line (self, "main2.c", self.line2, num_expected_locations=1, loc_exact=True) self.runCmd("run", RUN_SUCCEEDED) # The stop reason of the thread should be breakpoint. self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT, substrs = ['stopped', 'stop reason = breakpoint']) self.runCmd("frame variable int_ptr") self.expect("frame variable *int_ptr", substrs = ['= 7']) self.expect("expression *int_ptr", substrs = ['= 7']) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
gpl-3.0
-7,694,408,415,580,739,000
35.166667
119
0.611277
false
EliotBerriot/django
django/utils/inspect.py
323
4195
from __future__ import absolute_import import inspect from django.utils import six def getargspec(func): if six.PY2: return inspect.getargspec(func) sig = inspect.signature(func) args = [ p.name for p in sig.parameters.values() if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD ] varargs = [ p.name for p in sig.parameters.values() if p.kind == inspect.Parameter.VAR_POSITIONAL ] varargs = varargs[0] if varargs else None varkw = [ p.name for p in sig.parameters.values() if p.kind == inspect.Parameter.VAR_KEYWORD ] varkw = varkw[0] if varkw else None defaults = [ p.default for p in sig.parameters.values() if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and p.default is not p.empty ] or None return args, varargs, varkw, defaults def get_func_args(func): if six.PY2: argspec = inspect.getargspec(func) return argspec.args[1:] # ignore 'self' sig = inspect.signature(func) return [ arg_name for arg_name, param in sig.parameters.items() if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD ] def get_func_full_args(func): """ Return a list of (argument name, default value) tuples. If the argument does not have a default value, omit it in the tuple. Arguments such as *args and **kwargs are also included. """ if six.PY2: argspec = inspect.getargspec(func) args = argspec.args[1:] # ignore 'self' defaults = argspec.defaults or [] # Split args into two lists depending on whether they have default value no_default = args[:len(args) - len(defaults)] with_default = args[len(args) - len(defaults):] # Join the two lists and combine it with default values args = [(arg,) for arg in no_default] + zip(with_default, defaults) # Add possible *args and **kwargs and prepend them with '*' or '**' varargs = [('*' + argspec.varargs,)] if argspec.varargs else [] kwargs = [('**' + argspec.keywords,)] if argspec.keywords else [] return args + varargs + kwargs sig = inspect.signature(func) args = [] for arg_name, param in sig.parameters.items(): name = arg_name # Ignore 'self' if name == 'self': continue if param.kind == inspect.Parameter.VAR_POSITIONAL: name = '*' + name elif param.kind == inspect.Parameter.VAR_KEYWORD: name = '**' + name if param.default != inspect.Parameter.empty: args.append((name, param.default)) else: args.append((name,)) return args def func_accepts_kwargs(func): if six.PY2: # Not all callables are inspectable with getargspec, so we'll # try a couple different ways but in the end fall back on assuming # it is -- we don't want to prevent registration of valid but weird # callables. try: argspec = inspect.getargspec(func) except TypeError: try: argspec = inspect.getargspec(func.__call__) except (TypeError, AttributeError): argspec = None return not argspec or argspec[2] is not None return any( p for p in inspect.signature(func).parameters.values() if p.kind == p.VAR_KEYWORD ) def func_accepts_var_args(func): """ Return True if function 'func' accepts positional arguments *args. """ if six.PY2: return inspect.getargspec(func)[1] is not None return any( p for p in inspect.signature(func).parameters.values() if p.kind == p.VAR_POSITIONAL ) def func_has_no_args(func): args = inspect.getargspec(func)[0] if six.PY2 else [ p for p in inspect.signature(func).parameters.values() if p.kind == p.POSITIONAL_OR_KEYWORD ] return len(args) == 1 def func_supports_parameter(func, parameter): if six.PY3: return parameter in inspect.signature(func).parameters else: args, varargs, varkw, defaults = inspect.getargspec(func) return parameter in args
bsd-3-clause
2,578,453,951,049,575,400
31.022901
89
0.613111
false
fluxw42/youtube-dl
youtube_dl/extractor/laola1tv.py
14
6314
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..utils import ( ExtractorError, unified_strdate, urlencode_postdata, xpath_element, xpath_text, urljoin, update_url_query, ) class Laola1TvEmbedIE(InfoExtractor): IE_NAME = 'laola1tv:embed' _VALID_URL = r'https?://(?:www\.)?laola1\.tv/titanplayer\.php\?.*?\bvideoid=(?P<id>\d+)' _TEST = { # flashvars.premium = "false"; 'url': 'https://www.laola1.tv/titanplayer.php?videoid=708065&type=V&lang=en&portal=int&customer=1024', 'info_dict': { 'id': '708065', 'ext': 'mp4', 'title': 'MA Long CHN - FAN Zhendong CHN', 'uploader': 'ITTF - International Table Tennis Federation', 'upload_date': '20161211', }, } def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) flash_vars = self._search_regex( r'(?s)flashvars\s*=\s*({.+?});', webpage, 'flash vars') def get_flashvar(x, *args, **kwargs): flash_var = self._search_regex( r'%s\s*:\s*"([^"]+)"' % x, flash_vars, x, default=None) if not flash_var: flash_var = self._search_regex([ r'flashvars\.%s\s*=\s*"([^"]+)"' % x, r'%s\s*=\s*"([^"]+)"' % x], webpage, x, *args, **kwargs) return flash_var hd_doc = self._download_xml( 'http://www.laola1.tv/server/hd_video.php', video_id, query={ 'play': get_flashvar('streamid'), 'partner': get_flashvar('partnerid'), 'portal': get_flashvar('portalid'), 'lang': get_flashvar('sprache'), 'v5ident': '', }) _v = lambda x, **k: xpath_text(hd_doc, './/video/' + x, **k) title = _v('title', fatal=True) token_url = None premium = get_flashvar('premium', default=None) if premium: token_url = update_url_query( _v('url', fatal=True), { 'timestamp': get_flashvar('timestamp'), 'auth': get_flashvar('auth'), }) else: data_abo = urlencode_postdata( dict((i, v) for i, v in enumerate(_v('req_liga_abos').split(',')))) token_url = self._download_json( 'https://club.laola1.tv/sp/laola1/api/v3/user/session/premium/player/stream-access', video_id, query={ 'videoId': _v('id'), 'target': self._search_regex(r'vs_target = (\d+);', webpage, 'vs target'), 'label': _v('label'), 'area': _v('area'), }, data=data_abo)['data']['stream-access'][0] token_doc = self._download_xml( token_url, video_id, 'Downloading token', headers=self.geo_verification_headers()) token_attrib = xpath_element(token_doc, './/token').attrib if token_attrib['status'] != '0': raise ExtractorError( 'Token error: %s' % token_attrib['comment'], expected=True) formats = self._extract_akamai_formats( '%s?hdnea=%s' % (token_attrib['url'], token_attrib['auth']), video_id) self._sort_formats(formats) categories_str = _v('meta_sports') categories = categories_str.split(',') if categories_str else [] is_live = _v('islive') == 'true' return { 'id': video_id, 'title': self._live_title(title) if is_live else title, 'upload_date': unified_strdate(_v('time_date')), 'uploader': _v('meta_organisation'), 'categories': categories, 'is_live': is_live, 'formats': formats, } class Laola1TvIE(InfoExtractor): IE_NAME = 'laola1tv' _VALID_URL = r'https?://(?:www\.)?laola1\.tv/[a-z]+-[a-z]+/[^/]+/(?P<id>[^/?#&]+)' _TESTS = [{ 'url': 'http://www.laola1.tv/de-de/video/straubing-tigers-koelner-haie/227883.html', 'info_dict': { 'id': '227883', 'display_id': 'straubing-tigers-koelner-haie', 'ext': 'flv', 'title': 'Straubing Tigers - Kölner Haie', 'upload_date': '20140912', 'is_live': False, 'categories': ['Eishockey'], }, 'params': { 'skip_download': True, }, }, { 'url': 'http://www.laola1.tv/de-de/video/straubing-tigers-koelner-haie', 'info_dict': { 'id': '464602', 
'display_id': 'straubing-tigers-koelner-haie', 'ext': 'flv', 'title': 'Straubing Tigers - Kölner Haie', 'upload_date': '20160129', 'is_live': False, 'categories': ['Eishockey'], }, 'params': { 'skip_download': True, }, }, { 'url': 'http://www.laola1.tv/de-de/livestream/2016-03-22-belogorie-belgorod-trentino-diatec-lde', 'info_dict': { 'id': '487850', 'display_id': '2016-03-22-belogorie-belgorod-trentino-diatec-lde', 'ext': 'flv', 'title': 'Belogorie BELGOROD - TRENTINO Diatec', 'upload_date': '20160322', 'uploader': 'CEV - Europäischer Volleyball Verband', 'is_live': True, 'categories': ['Volleyball'], }, 'params': { 'skip_download': True, }, 'skip': 'This live stream has already finished.', }] def _real_extract(self, url): display_id = self._match_id(url) webpage = self._download_webpage(url, display_id) if 'Dieser Livestream ist bereits beendet.' in webpage: raise ExtractorError('This live stream has already finished.', expected=True) iframe_url = urljoin(url, self._search_regex( r'<iframe[^>]*?id="videoplayer"[^>]*?src="([^"]+)"', webpage, 'iframe url')) return { '_type': 'url', 'display_id': display_id, 'url': iframe_url, 'ie_key': 'Laola1TvEmbed', }
unlicense
768,122,792,356,735,400
34.857955
110
0.495484
false
maciekcc/tensorflow
tensorflow/python/estimator/export/export.py
19
7374
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Configuration and utilities for receiving inputs at serving time.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import os import time import six from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import parsing_ops from tensorflow.python.util import compat _SINGLE_FEATURE_DEFAULT_NAME = 'feature' _SINGLE_RECEIVER_DEFAULT_NAME = 'input' class ServingInputReceiver(collections.namedtuple('ServingInputReceiver', ['features', 'receiver_tensors'])): """A return type for a serving_input_receiver_fn. The expected return values are: features: A dict of string to `Tensor` or `SparseTensor`, specifying the features to be passed to the model. receiver_tensors: a `Tensor`, or a dict of string to `Tensor`, specifying input nodes where this receiver expects to be fed. Typically, this is a single placeholder expecting serialized `tf.Example` protos. """ # TODO(soergel): add receiver_alternatives when supported in serving. def __new__(cls, features, receiver_tensors): if features is None: raise ValueError('features must be defined.') if not isinstance(features, dict): features = {_SINGLE_FEATURE_DEFAULT_NAME: features} for name, tensor in features.items(): if not isinstance(name, six.string_types): raise ValueError('feature keys must be strings: {}.'.format(name)) if not (isinstance(tensor, ops.Tensor) or isinstance(tensor, sparse_tensor.SparseTensor)): raise ValueError( 'feature {} must be a Tensor or SparseTensor.'.format(name)) if receiver_tensors is None: raise ValueError('receiver_tensors must be defined.') if not isinstance(receiver_tensors, dict): receiver_tensors = {_SINGLE_RECEIVER_DEFAULT_NAME: receiver_tensors} for name, tensor in receiver_tensors.items(): if not isinstance(name, six.string_types): raise ValueError( 'receiver_tensors keys must be strings: {}.'.format(name)) if not isinstance(tensor, ops.Tensor): raise ValueError( 'receiver_tensor {} must be a Tensor.'.format(name)) return super(ServingInputReceiver, cls).__new__( cls, features=features, receiver_tensors=receiver_tensors) def build_parsing_serving_input_receiver_fn(feature_spec, default_batch_size=None): """Build a serving_input_receiver_fn expecting fed tf.Examples. Creates a serving_input_receiver_fn that expects a serialized tf.Example fed into a string placeholder. The function parses the tf.Example according to the provided feature_spec, and returns all parsed Tensors as features. Args: feature_spec: a dict of string to `VarLenFeature`/`FixedLenFeature`. default_batch_size: the number of query examples expected per batch. 
Leave unset for variable batch size (recommended). Returns: A serving_input_receiver_fn suitable for use in serving. """ def serving_input_receiver_fn(): """An input_fn that expects a serialized tf.Example.""" serialized_tf_example = array_ops.placeholder(dtype=dtypes.string, shape=[default_batch_size], name='input_example_tensor') receiver_tensors = {'examples': serialized_tf_example} features = parsing_ops.parse_example(serialized_tf_example, feature_spec) return ServingInputReceiver(features, receiver_tensors) return serving_input_receiver_fn def build_raw_serving_input_receiver_fn(features, default_batch_size=None): """Build a serving_input_receiver_fn expecting feature Tensors. Creates an serving_input_receiver_fn that expects all features to be fed directly. Args: features: a dict of string to `Tensor`. default_batch_size: the number of query examples expected per batch. Leave unset for variable batch size (recommended). Returns: A serving_input_receiver_fn. """ def serving_input_receiver_fn(): """A serving_input_receiver_fn that expects features to be fed directly.""" receiver_tensors = {} for name, t in features.items(): shape_list = t.get_shape().as_list() shape_list[0] = default_batch_size shape = tensor_shape.TensorShape(shape_list) # Reuse the feature tensor name for the placeholder, excluding the index placeholder_name = t.name.split(':')[0] receiver_tensors[name] = array_ops.placeholder(dtype=t.dtype, shape=shape, name=placeholder_name) # TODO(b/34885899): remove the unnecessary copy # The features provided are simply the placeholders, but we defensively copy # the dict because it may be mutated. return ServingInputReceiver(receiver_tensors, receiver_tensors.copy()) return serving_input_receiver_fn ### Below utilities are specific to SavedModel exports. def build_all_signature_defs(receiver_tensors, export_outputs): """Build `SignatureDef`s for all export outputs.""" if not isinstance(receiver_tensors, dict): receiver_tensors = {'receiver': receiver_tensors} if export_outputs is None or not isinstance(export_outputs, dict): raise ValueError('export_outputs must be a dict.') signature_def_map = { '{}'.format(output_key or 'None'): export_output.as_signature_def(receiver_tensors) for output_key, export_output in export_outputs.items()} return signature_def_map def get_timestamped_export_dir(export_dir_base): """Builds a path to a new subdirectory within the base directory. Each export is written into a new subdirectory named using the current time. This guarantees monotonically increasing version numbers even across multiple runs of the pipeline. The timestamp used is the number of seconds since epoch UTC. Args: export_dir_base: A string containing a directory to write the exported graph and checkpoints. Returns: The full path of the new subdirectory (which is not actually created yet). """ export_timestamp = int(time.time()) export_dir = os.path.join( compat.as_bytes(export_dir_base), compat.as_bytes(str(export_timestamp))) return export_dir
apache-2.0
6,457,649,978,876,341,000
38.859459
80
0.685788
false
sridevikoushik31/openstack
nova/availability_zones.py
3
3309
# Copyright (c) 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Availability zone helper functions.""" from oslo.config import cfg from nova import db from nova.openstack.common import log as logging availability_zone_opts = [ cfg.StrOpt('internal_service_availability_zone', default='internal', help='availability_zone to show internal services under'), cfg.StrOpt('default_availability_zone', # deprecated in Grizzly release deprecated_name='node_availability_zone', default='nova', help='default compute node availability_zone'), ] CONF = cfg.CONF CONF.register_opts(availability_zone_opts) LOG = logging.getLogger(__name__) def set_availability_zones(context, services): # Makes sure services isn't a sqlalchemy object services = [dict(service.iteritems()) for service in services] metadata = db.aggregate_host_get_by_metadata_key(context, key='availability_zone') for service in services: az = CONF.internal_service_availability_zone if service['topic'] == "compute": if metadata.get(service['host']): az = u','.join(list(metadata[service['host']])) else: az = CONF.default_availability_zone service['availability_zone'] = az return services def get_host_availability_zone(context, host, conductor_api=None): if conductor_api: metadata = conductor_api.aggregate_metadata_get_by_host( context, host, key='availability_zone') else: metadata = db.aggregate_metadata_get_by_host( context, host, key='availability_zone') if 'availability_zone' in metadata: return list(metadata['availability_zone'])[0] else: return CONF.default_availability_zone def get_availability_zones(context): """Return available and unavailable zones.""" enabled_services = db.service_get_all(context, False) disabled_services = db.service_get_all(context, True) enabled_services = set_availability_zones(context, enabled_services) disabled_services = set_availability_zones(context, disabled_services) available_zones = [] for zone in [service['availability_zone'] for service in enabled_services]: if zone not in available_zones: available_zones.append(zone) not_available_zones = [] zones = [service['available_zones'] for service in disabled_services if service['available_zones'] not in available_zones] for zone in zones: if zone not in not_available_zones: not_available_zones.append(zone) return (available_zones, not_available_zones)
apache-2.0
4,454,980,183,463,405,600
36.602273
78
0.6712
false
mohseniaref/adore-doris
gui/snaphuConfigEditor.py
2
11609
#!/usr/bin/env python # example basictreeview.py import pygtk pygtk.require('2.0') import gtk import os import dialogs class SnaphuConfigEditor: def snaphuParser(self, set=None, setFile=None): if setFile is None: setFile=self.setFile; if set is None: set=self.set; f=open(setFile, 'r') for l in f: wl=l.split('#')[0].strip() #remove comments if wl!='': #skip empty lines key=wl.split()[0].strip() #get the keyword val=''.join(wl.split()[1:]) #get value #print [key, val] set[key]=val f.close() def advancedChkBtnToggled(self, widget, liststore): #widget.get_active() liststore.clear() self.displayOptions(self.setFile, liststore); def displayOptions(self, setFile, liststore): # self.set.read(setFile) # we'll add some data now - 4 rows with 3 child rows each #for section in self.set.sections(): # sectionId = self.liststore.append(None, (False,section, '')) # for option,value in self.set.items(section): # if "_rel_" in option and not self.advancedChkBtn.get_active(): # continue; # self.liststore.append(sectionId, (False,option,value)) k=0; if os.path.exists(self.setFile): f=open(self.setFile, 'r') for l in f: wl=l.split('#')[0].strip() #remove comments if wl!='': #skip empty lines k=k+1; key=wl.split()[0].strip() #get the keyword val=''.join(wl.split()[1:]) #get value #print [key, val] self.liststore.append((False, key, val)) f.close() self.window.set_title(str('%d settings: %s' % (k, self.setFile) )) def chkbx_toggled_cb(self, cell, path, liststore): liststore[path][0]=not liststore[path][0] return # Handle edited value def edited_cb2(self, cell, path, new_text, liststore): #print path #print new_text #print liststore liststore[path][2] = new_text liststore[path][0] = True self.window.set_title(str('! %s' % ( self.setFile) )) return # def row_inserted(self, widget, path, iter): # print widget # print path # print iter # # self.treeview.set_cursor(path, focus_column=self.tvcolumn2, start_editing=True) # close the window and quit def delete_event(self, widget, event, data=None): #gtk.main_quit() del self.set del self.liststore self.window.destroy() return False def saveAsButtonClicked(self, widget, liststore): chooser = gtk.FileChooserDialog(title=None,action=gtk.FILE_CHOOSER_ACTION_SAVE, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) response = chooser.run() if response == gtk.RESPONSE_OK: filename=chooser.get_filename(); chooser.destroy() self.setFile=filename f=open(self.setFile, 'w') for row in liststore: f.write(str('%s\t%s\n' %(row[1], row[2]))) f.close() self.window.set_title(str('%s' % ( self.setFile) )) def saveButtonClicked(self, widget, liststore): f=open(self.setFile, 'w') for row in liststore: f.write(str('%s\t%s\n' %(row[1], row[2]))) f.close() self.window.set_title(str('%s' % ( self.setFile) )) #Let's see if this will stop the constant crashing #self.window.destroy(); def addButtonClicked(self, widget, liststore): dropdownlist=self.set.keys(); for row in liststore: if row[1] in dropdownlist: dropdownlist.remove(row[1]); if len(dropdownlist)>0: response,param=dialogs.dropdown(dropdownlist, '<b>Add</b>'); if response == gtk.RESPONSE_OK: liststore.prepend((False, param, self.set[param])) self.window.set_title(str('! %s' % ( self.setFile) )) return else: dialogs.error('No more keywords to add.') return def removeButtonClicked(self, widget, liststore): for row in liststore: if row[0] == True: liststore.remove(row.iter) self.window.set_title(str('! 
%s' % (self.setFile) )) def openButtonClicked(self, widget, liststore): liststore.clear() chooser = gtk.FileChooserDialog(title=None,action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) response = chooser.run() if response == gtk.RESPONSE_OK: filename=chooser.get_filename(); chooser.destroy() self.setFile=filename self.displayOptions(self.setFile, liststore); def __init__(self,mainWindow): #Load settings #self.set=ConfigParser.ConfigParser() self.set={} #Make settings case sensitive #self.set.optionxform = str # mainWindow.readSet(); self.confFull=os.path.join(mainWindow.set.get('adore','ADOREFOLDER').strip('"'),'set/snaphu.conf.full') self.snaphuParser(setFile=self.confFull); #Initialize the set object. self.setFile=os.path.join(mainWindow.set.get('adore','outputFolder').strip('"'),'snaphu.conf') self.runcmd=mainWindow.runcmd; # self.set=ConfigParser.ConfigParser() # self.set.read(setFile) # Create a new window self.window = gtk.Window()#hadjustment=None, vadjustment=None) self.swindow = gtk.ScrolledWindow(hadjustment=None, vadjustment=None) self.swindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS) self.window.set_title("AGOOEY Snaphu Configuration Editor") self.window.set_size_request(500, 600) self.window.connect("delete_event", self.delete_event) self.vbox = gtk.VBox(homogeneous=False, spacing=0); self.hbox = gtk.HBox(homogeneous=False, spacing=0); # create a TreeStore with one string column to use as the model self.liststore = gtk.ListStore(bool, str, str) ##### SET THE HBOX ##### self.saveButton=gtk.Button(label='Save', stock=None, use_underline=True); self.saveButton.connect("clicked", self.saveButtonClicked, self.liststore) self.saveButton.set_flags(gtk.CAN_DEFAULT); self.saveButton.show(); self.saveAsButton=gtk.Button(label='Save As', stock=None, use_underline=True); self.saveAsButton.connect("clicked", self.saveAsButtonClicked, self.liststore) self.saveAsButton.set_flags(gtk.CAN_DEFAULT); self.saveAsButton.show(); # self.refreshButton=gtk.Button(label='Refresh', stock=None, use_underline=True); # self.refreshButton.connect("clicked", self.refreshButtonClicked, self.liststore) # self.refreshButton.set_flags(gtk.CAN_DEFAULT); # self.refreshButton.show(); self.openButton=gtk.Button(label='Open', stock=None, use_underline=True); self.openButton.connect("clicked", self.openButtonClicked, self.liststore) self.openButton.set_flags(gtk.CAN_DEFAULT); self.openButton.show(); self.addButton=gtk.Button(label='Add', stock=None, use_underline=True); self.addButton.connect("clicked", self.addButtonClicked, self.liststore) self.addButton.set_flags(gtk.CAN_DEFAULT); self.addButton.show(); self.removeButton=gtk.Button(label='Remove', stock=None, use_underline=True); self.removeButton.connect("clicked", self.removeButtonClicked, self.liststore) self.removeButton.set_flags(gtk.CAN_DEFAULT); self.removeButton.show(); # self.advancedChkBtn=gtk.CheckButton("Advanced"); # self.advancedChkBtn.connect("toggled", self.advancedChkBtnToggled, self.liststore) # self.advancedChkBtn.show(); self.hbox.pack_start(self.openButton, expand = False, fill = False, padding = 5); self.hbox.pack_start(self.saveButton, expand = False, fill = False, padding = 5); self.hbox.pack_start(self.saveAsButton, expand = False, fill = False, padding = 5); self.hbox.pack_start(self.addButton, expand = False, fill = False, padding = 5); self.hbox.pack_start(self.removeButton, expand = False, fill = False, padding = 5); # self.hbox.pack_start(self.refreshButton, 
expand = False, fill = False, padding = 5); # self.hbox.pack_start(self.advancedChkBtn, expand = False, fill = False, padding = 20); ##### SET THE VBOX ##### # adj = gtk.Adjustment(0.0, 0.0, 100.0, 1.0, 10.0, 0.0) # scrollbar = gtk.HScale(adj) # self.vbox.pack_start(scrollbar, False, False, 0) #Add some data now self.displayOptions(self.setFile, self.liststore); # create the TreeView using liststore self.treeview = gtk.TreeView(self.liststore) # create a CellRendererText to render the data self.chkbx= gtk.CellRendererToggle(); self.cell = gtk.CellRendererText() self.cell2 = gtk.CellRendererText() #Make chkbox col activatable self.chkbx.set_property('activatable', True) #Make col1 editable self.cell2.set_property('editable', True) # connect the edit handling function self.cell2.connect('edited', self.edited_cb2, self.liststore) self.chkbx.connect('toggled', self.chkbx_toggled_cb, self.liststore) #self.liststore.connect('row_inserted', self.row_inserted) # create the TreeViewColumn to display the data self.tvcolumn0 = gtk.TreeViewColumn('Remove', self.chkbx) self.tvcolumn1 = gtk.TreeViewColumn('Settings', self.cell, text=1) self.tvcolumn2 = gtk.TreeViewColumn('Values', self.cell2, text=2) # add tvcolumn to treeview self.treeview.append_column(self.tvcolumn0) self.treeview.append_column(self.tvcolumn1) self.treeview.append_column(self.tvcolumn2) # add the cell to the tvcolumn and allow it to expand #self.tvcolumn.pack_start(self.cell, True) #self.tvcolumn2.pack_start(self.cell2, True) # set the cell "text" attribute to column 0 - retrieve text # from that column in liststore self.tvcolumn0.add_attribute(self.chkbx, 'active', 0) self.tvcolumn1.add_attribute(self.cell, 'text', 1) self.tvcolumn2.add_attribute(self.cell2, 'text', 2) # make it searchable self.treeview.set_search_column(1) # Allow sorting on the column self.tvcolumn1.set_sort_column_id(1) # Allow drag and drop reordering of rows self.treeview.set_reorderable(True) self.treeview.show() self.vbox.pack_start(self.hbox); self.vbox.pack_end(self.treeview); self.window.set_default(self.saveButton); self.swindow.add_with_viewport(self.vbox) self.window.add(self.swindow) #self.vbox.show() self.window.show_all(); def main(): gtk.main() return 0 if __name__ == "__main__": se = SnaphuConfigEditor() main()
gpl-2.0
-5,694,778,346,069,999,000
40.021201
118
0.600224
false
ldtp/pyatom
atomac/ldtpd/text.py
2
16670
# Copyright (c) 2012 VMware, Inc. All Rights Reserved. # This file is part of ATOMac. #@author: Nagappan Alagappan <[email protected]> #@copyright: Copyright (c) 2009-12 Nagappan Alagappan #http://ldtp.freedesktop.org # ATOMac is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by the Free # Software Foundation version 2 and no later version. # ATOMac is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 # for more details. # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 Franklin # St, Fifth Floor, Boston, MA 02110-1301 USA. """Text class.""" import re import fnmatch import atomac.Clipboard as Clipboard from utils import Utils from keypress_actions import KeyComboAction, KeyPressAction, KeyReleaseAction from server_exception import LdtpServerException class Text(Utils): def generatekeyevent(self, data): """ Generates key event to the system, this simulates the best user like interaction via keyboard. @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ KeyComboAction(data) return 1 def keypress(self, data): """ Press key. NOTE: keyrelease should be called @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ try: window=self._get_front_most_window() except (IndexError, ): window=self._get_any_window() key_press_action = KeyPressAction(window, data) return 1 def keyrelease(self, data): """ Release key. NOTE: keypress should be called before this @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ try: window=self._get_front_most_window() except (IndexError, ): window=self._get_any_window() key_release_action = KeyReleaseAction(window, data) return 1 def enterstring(self, window_name, object_name='', data=''): """ Type string sequence. @param window_name: Window name to focus on, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to focus on, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ if not object_name and not data: return self.generatekeyevent(window_name) else: object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) self._grabfocus(object_handle) object_handle.sendKeys(data) return 1 def settextvalue(self, window_name, object_name, data): """ Type string sequence. @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param data: data to type. @type data: string @return: 1 on success. 
@rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) object_handle.AXValue=data return 1 def gettextvalue(self, window_name, object_name, startPosition=0, endPosition=0): """ Get text value @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param startPosition: Starting position of text to fetch @type: startPosition: int @param endPosition: Ending position of text to fetch @type: endPosition: int @return: text on success. @rtype: string """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) return object_handle.AXValue def inserttext(self, window_name, object_name, position, data): """ Insert string sequence in given position. @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param position: position where text has to be entered. @type data: int @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) existing_data=object_handle.AXValue size=len(existing_data) if position < 0: position=0 if position > size: position=size object_handle.AXValue=existing_data[:position] + data + \ existing_data[position:] return 1 def verifypartialmatch(self, window_name, object_name, partial_text): """ Verify partial text @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param partial_text: Partial text to match @type object_name: string @return: 1 on success. @rtype: integer """ try: if re.search(fnmatch.translate(partial_text), self.gettextvalue(window_name, object_name)): return 1 except: pass return 0 def verifysettext(self, window_name, object_name, text): """ Verify text is set correctly @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param text: text to match @type object_name: string @return: 1 on success. @rtype: integer """ try: return int(re.match(fnmatch.translate(text), self.gettextvalue(window_name, object_name))) except: return 0 def istextstateenabled(self, window_name, object_name): """ Verifies text state enabled or not @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: 1 on success 0 on failure. 
@rtype: integer """ try: object_handle=self._get_object_handle(window_name, object_name) if object_handle.AXEnabled: return 1 except LdtpServerException: pass return 0 def getcharcount(self, window_name, object_name): """ Get character count @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) return object_handle.AXNumberOfCharacters def appendtext(self, window_name, object_name, data): """ Append string sequence. @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param data: data to type. @type data: string @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) object_handle.AXValue += data return 1 def getcursorposition(self, window_name, object_name): """ Get cursor position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: Cursor position on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) return object_handle.AXSelectedTextRange.loc def setcursorposition(self, window_name, object_name, cursor_position): """ Set cursor position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param cursor_position: Cursor position to be set @type object_name: string @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) object_handle.AXSelectedTextRange.loc=cursor_position return 1 def cuttext(self, window_name, object_name, start_position, end_position=-1): """ cut text from start position to end position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param start_position: Start position @type object_name: integer @param end_position: End position, default -1 Cut all the text from start position till end @type object_name: integer @return: 1 on success. 
@rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) size=object_handle.AXNumberOfCharacters if end_position == -1 or end_position > size: end_position=size if start_position < 0: start_position=0 data=object_handle.AXValue Clipboard.copy(data[start_position:end_position]) object_handle.AXValue=data[:start_position] + data[end_position:] return 1 def copytext(self, window_name, object_name, start_position, end_position=-1): """ copy text from start position to end position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param start_position: Start position @type object_name: integer @param end_position: End position, default -1 Copy all the text from start position till end @type object_name: integer @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) size=object_handle.AXNumberOfCharacters if end_position == -1 or end_position > size: end_position=size if start_position < 0: start_position=0 data=object_handle.AXValue Clipboard.copy(data[start_position:end_position]) return 1 def deletetext(self, window_name, object_name, start_position, end_position=-1): """ delete text from start position to end position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param start_position: Start position @type object_name: integer @param end_position: End position, default -1 Delete all the text from start position till end @type object_name: integer @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) size=object_handle.AXNumberOfCharacters if end_position == -1 or end_position > size: end_position=size if start_position < 0: start_position=0 data=object_handle.AXValue object_handle.AXValue=data[:start_position] + data[end_position:] return 1 def pastetext(self, window_name, object_name, position=0): """ paste text from start position to end position @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @param position: Position to paste the text, default 0 @type object_name: integer @return: 1 on success. @rtype: integer """ object_handle=self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) size=object_handle.AXNumberOfCharacters if position > size: position=size if position < 0: position=0 clipboard=Clipboard.paste() data=object_handle.AXValue object_handle.AXValue=data[:position] + clipboard + data[position:] return 1
gpl-2.0
-7,937,828,098,198,052,000
35.557018
85
0.611938
false
initzx/aobot
commands/private/changepic.py
1
1135
import aiohttp

from registrar import AbstractCommand, bot_command


@bot_command
class Command(AbstractCommand):
    """ Template for bot command classes. """

    _name = 'changepic'
    _aliases = ['changepic']
    _enabled = True
    tags = {'misc': ['private'], 'args': {'req': ['pic'], 'nreq': []}}

    @staticmethod
    async def execute(client, msg, **kwargs):
        """ Executes this command.

        :type client: bot.Bot
        :type msg: discord.Message
        """
        with aiohttp.Timeout(10):
            async with aiohttp.get(kwargs.get('pic')) as res:
                await client.edit_profile(avatar=await res.read())

    @property
    def name(self):
        """ The name of this command """
        return self._name

    @property
    def aliases(self):
        """ The aliases that can be used to call this command """
        return self._aliases

    @property
    def enabled(self):
        """ Controls whether the command is allowed to be executed. """
        return self._enabled

    @enabled.setter
    def enabled(self, value):
        """ Setter for `enabled` """
        self.enabled = value
gpl-3.0
-8,128,590,722,962,857,000
26.02381
71
0.586784
false
TheTimmy/spack
var/spack/repos/builtin/packages/gflags/package.py
3
1900
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Gflags(CMakePackage):
    """The gflags package contains a C++ library that implements
    commandline flags processing. It includes built-in support for
    standard types such as string and the ability to define flags
    in the source file in which they are used. Online documentation
    available at: https://gflags.github.io/gflags/"""

    homepage = "https://gflags.github.io/gflags"
    url = "https://github.com/gflags/gflags/archive/v2.1.2.tar.gz"

    version('2.1.2', 'ac432de923f9de1e9780b5254884599f')

    depends_on('[email protected]:', type='build')

    def cmake_args(self):
        return ['-DBUILD_SHARED_LIBS=ON']
lgpl-2.1
-6,356,458,891,907,912,000
43.186047
78
0.680526
false
data-exp-lab/girder
girder/utility/search.py
4
3228
#!/usr/bin/env python
# -*- coding: utf-8 -*-

###############################################################################
#  Copyright 2013 Kitware Inc.
#
#  Licensed under the Apache License, Version 2.0 ( the "License" );
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
###############################################################################

from functools import partial

from girder.models.model_base import ModelImporter
from girder.exceptions import GirderException


_allowedSearchMode = {}


def getSearchModeHandler(mode):
    """
    Get the handler function for a search mode

    :param mode: A search mode identifier.
    :type mode: str
    :returns: A search mode handler function, or None.
    :rtype: function or None
    """
    return _allowedSearchMode.get(mode)


def addSearchMode(mode, handler):
    """
    Register a search mode. New searches made for the registered mode will
    call the handler function. The handler function must take parameters:
    `query`, `types`, `user`, `level`, `limit`, `offset`, and return the
    search results.

    :param mode: A search mode identifier.
    :type mode: str
    :param handler: A search mode handler function.
    :type handler: function
    """
    if _allowedSearchMode.get(mode) is not None:
        raise GirderException('A search mode %r already exists.' % mode)
    _allowedSearchMode[mode] = handler


def removeSearchMode(mode):
    """
    Remove a search mode. This will fail gracefully (returning `False`) if no
    search mode `mode` was registered.

    :param mode: A search mode identifier.
    :type mode: str
    :returns: Whether the search mode was actually removed.
    :rtype: bool
    """
    return _allowedSearchMode.pop(mode, None) is not None


def _commonSearchModeHandler(mode, query, types, user, level, limit, offset):
    """
    The common handler for `text` and `prefix` search modes.
    """
    # Avoid circular import
    from girder.api.v1.resource import allowedSearchTypes

    method = '%sSearch' % mode
    results = {}

    for modelName in types:
        if modelName not in allowedSearchTypes:
            continue

        if '.' in modelName:
            name, plugin = modelName.rsplit('.', 1)
            model = ModelImporter.model(name, plugin)
        else:
            model = ModelImporter.model(modelName)

        if model is not None:
            results[modelName] = [
                model.filter(d, user) for d in getattr(model, method)(
                    query=query, user=user, limit=limit, offset=offset, level=level)
            ]
    return results


# Add dynamically the default search mode
addSearchMode('text', partial(_commonSearchModeHandler, mode='text'))
addSearchMode('prefix', partial(_commonSearchModeHandler, mode='prefix'))
apache-2.0
3,221,596,797,205,632,500
30.960396
98
0.644672
false
Manuel4131/youtube-dl
youtube_dl/extractor/onionstudios.py
109
2787
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import determine_ext


class OnionStudiosIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?onionstudios\.com/(?:videos/[^/]+-|embed\?.*\bid=)(?P<id>\d+)(?!-)'

    _TESTS = [{
        'url': 'http://www.onionstudios.com/videos/hannibal-charges-forward-stops-for-a-cocktail-2937',
        'md5': 'd4851405d31adfadf71cd7a487b765bb',
        'info_dict': {
            'id': '2937',
            'ext': 'mp4',
            'title': 'Hannibal charges forward, stops for a cocktail',
            'description': 'md5:545299bda6abf87e5ec666548c6a9448',
            'thumbnail': 're:^https?://.*\.jpg$',
            'uploader': 'The A.V. Club',
            'uploader_id': 'TheAVClub',
        },
    }, {
        'url': 'http://www.onionstudios.com/embed?id=2855&autoplay=true',
        'only_matching': True,
    }]

    @staticmethod
    def _extract_url(webpage):
        mobj = re.search(
            r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?onionstudios\.com/embed.+?)\1', webpage)
        if mobj:
            return mobj.group('url')

    def _real_extract(self, url):
        video_id = self._match_id(url)

        webpage = self._download_webpage(
            'http://www.onionstudios.com/embed?id=%s' % video_id, video_id)

        formats = []
        for src in re.findall(r'<source[^>]+src="([^"]+)"', webpage):
            if determine_ext(src) != 'm3u8':  # m3u8 always results in 403
                formats.append({
                    'url': src,
                })
        self._sort_formats(formats)

        title = self._search_regex(
            r'share_title\s*=\s*(["\'])(?P<title>[^\1]+?)\1',
            webpage, 'title', group='title')
        description = self._search_regex(
            r'share_description\s*=\s*(["\'])(?P<description>[^\1]+?)\1',
            webpage, 'description', default=None, group='description')
        thumbnail = self._search_regex(
            r'poster\s*=\s*(["\'])(?P<thumbnail>[^\1]+?)\1',
            webpage, 'thumbnail', default=False, group='thumbnail')

        uploader_id = self._search_regex(
            r'twitter_handle\s*=\s*(["\'])(?P<uploader_id>[^\1]+?)\1',
            webpage, 'uploader id', fatal=False, group='uploader_id')
        uploader = self._search_regex(
            r'window\.channelName\s*=\s*(["\'])Embedded:(?P<uploader>[^\1]+?)\1',
            webpage, 'uploader', default=False, group='uploader')

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'formats': formats,
        }
unlicense
-3,026,676,659,829,315,600
35.671053
110
0.52099
false
Mashape/dd-agent
tests/core/test_histogram.py
40
4851
# stdlib
import unittest

# project
from aggregator import Histogram, MetricsAggregator
from config import get_histogram_aggregates, get_histogram_percentiles


class TestHistogram(unittest.TestCase):
    def test_default(self):
        stats = MetricsAggregator('myhost')

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()

        self.assertEquals(len(metrics), 5, metrics)

        value_by_type = {}
        for k in metrics:
            value_by_type[k['metric'][len('myhistogram')+1:]] = k['points'][0][1]

        self.assertEquals(
            sorted(value_by_type.keys()),
            ['95percentile', 'avg', 'count', 'max', 'median'],
            value_by_type
        )

        self.assertEquals(value_by_type['max'], 19, value_by_type)
        self.assertEquals(value_by_type['median'], 9, value_by_type)
        self.assertEquals(value_by_type['avg'], 9.5, value_by_type)
        self.assertEquals(value_by_type['count'], 20.0, value_by_type)
        self.assertEquals(value_by_type['95percentile'], 18, value_by_type)

    def test_custom_single_percentile(self):
        configstr = '0.40'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr)
        )

        self.assertEquals(
            stats.metric_config[Histogram]['percentiles'],
            [0.40],
            stats.metric_config[Histogram]
        )

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()

        self.assertEquals(len(metrics), 5, metrics)

        value_by_type = {}
        for k in metrics:
            value_by_type[k['metric'][len('myhistogram')+1:]] = k['points'][0][1]

        self.assertEquals(value_by_type['40percentile'], 7, value_by_type)

    def test_custom_multiple_percentile(self):
        configstr = '0.4, 0.65, 0.999'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr)
        )

        self.assertEquals(
            stats.metric_config[Histogram]['percentiles'],
            [0.4, 0.65, 0.99],
            stats.metric_config[Histogram]
        )

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()
        self.assertEquals(len(metrics), 7, metrics)

        value_by_type = {}
        for k in metrics:
            value_by_type[k['metric'][len('myhistogram')+1:]] = k['points'][0][1]

        self.assertEquals(value_by_type['40percentile'], 7, value_by_type)
        self.assertEquals(value_by_type['65percentile'], 12, value_by_type)
        self.assertEquals(value_by_type['99percentile'], 19, value_by_type)

    def test_custom_invalid_percentile(self):
        configstr = '1.2342'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr)
        )

        self.assertEquals(
            stats.metric_config[Histogram]['percentiles'],
            [],
            stats.metric_config[Histogram]
        )

    def test_custom_invalid_percentile2(self):
        configstr = 'aoeuoeu'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr)
        )

        self.assertEquals(
            stats.metric_config[Histogram]['percentiles'],
            [],
            stats.metric_config[Histogram]
        )

    def test_custom_invalid_percentile3skip(self):
        configstr = 'aoeuoeu, 2.23, 0.8, 23'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr)
        )

        self.assertEquals(
            stats.metric_config[Histogram]['percentiles'],
            [0.8],
            stats.metric_config[Histogram]
        )

    def test_custom_aggregate(self):
        configstr = 'median, max'
        stats = MetricsAggregator(
            'myhost',
            histogram_aggregates=get_histogram_aggregates(configstr)
        )

        self.assertEquals(
            sorted(stats.metric_config[Histogram]['aggregates']),
            ['max', 'median'],
            stats.metric_config[Histogram]
        )

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()

        self.assertEquals(len(metrics), 3, metrics)

        value_by_type = {}
        for k in metrics:
            value_by_type[k['metric'][len('myhistogram')+1:]] = k['points'][0][1]

        self.assertEquals(value_by_type['median'], 9, value_by_type)
        self.assertEquals(value_by_type['max'], 19, value_by_type)
        self.assertEquals(value_by_type['95percentile'], 18, value_by_type)
bsd-3-clause
3,797,583,063,153,037,300
30.705882
81
0.582148
false
lxwvictor/flask-ask
samples/session/session.py
2
1928
import logging
import os

from flask import Flask, json, render_template
from flask_ask import Ask, request, session, question, statement


app = Flask(__name__)
ask = Ask(app, "/")
logging.getLogger('flask_ask').setLevel(logging.DEBUG)


COLOR_KEY = "COLOR"


@ask.launch
def launch():
    card_title = render_template('card_title')
    question_text = render_template('welcome')
    reprompt_text = render_template('welcome_reprompt')
    return question(question_text).reprompt(reprompt_text).simple_card(card_title, question_text)


@ask.intent('MyColorIsIntent', mapping={'color': 'Color'})
def my_color_is(color):
    card_title = render_template('card_title')
    if color is not None:
        session.attributes[COLOR_KEY] = color
        question_text = render_template('known_color', color=color)
        reprompt_text = render_template('known_color_reprompt')
    else:
        question_text = render_template('unknown_color')
        reprompt_text = render_template('unknown_color_reprompt')
    return question(question_text).reprompt(reprompt_text).simple_card(card_title, question_text)


@ask.intent('WhatsMyColorIntent')
def whats_my_color():
    card_title = render_template('card_title')
    color = session.attributes.get(COLOR_KEY)
    if color is not None:
        statement_text = render_template('known_color_bye', color=color)
        return statement(statement_text).simple_card(card_title, statement_text)
    else:
        question_text = render_template('unknown_color_reprompt')
        return question(question_text).reprompt(question_text).simple_card(card_title, question_text)


@ask.session_ended
def session_ended():
    return "{}", 200


if __name__ == '__main__':
    if 'ASK_VERIFY_REQUESTS' in os.environ:
        verify = str(os.environ.get('ASK_VERIFY_REQUESTS', '')).lower()
        if verify == 'false':
            app.config['ASK_VERIFY_REQUESTS'] = False
    app.run(debug=True)
apache-2.0
8,475,155,738,597,935,000
31.133333
101
0.685685
false
cmdunkers/DeeperMind
PythonEnv/lib/python2.7/site-packages/scipy/io/harwell_boeing/hb.py
23
18482
""" Implementation of Harwell-Boeing read/write. At the moment not the full Harwell-Boeing format is supported. Supported features are: - assembled, non-symmetric, real matrices - integer for pointer/indices - exponential format for float values, and int format """ from __future__ import division, print_function, absolute_import # TODO: # - Add more support (symmetric/complex matrices, non-assembled matrices ?) # XXX: reading is reasonably efficient (>= 85 % is in numpy.fromstring), but # takes a lot of memory. Being faster would require compiled code. # write is not efficient. Although not a terribly exciting task, # having reusable facilities to efficiently read/write fortran-formatted files # would be useful outside this module. import warnings import numpy as np from scipy.sparse import csc_matrix from scipy.io.harwell_boeing._fortran_format_parser import \ FortranFormatParser, IntFormat, ExpFormat from scipy._lib.six import string_types __all__ = ["MalformedHeader", "hb_read", "hb_write", "HBInfo", "HBFile", "HBMatrixType"] class MalformedHeader(Exception): pass class LineOverflow(Warning): pass def _nbytes_full(fmt, nlines): """Return the number of bytes to read to get every full lines for the given parsed fortran format.""" return (fmt.repeat * fmt.width + 1) * (nlines - 1) class HBInfo(object): @classmethod def from_data(cls, m, title="Default title", key="0", mxtype=None, fmt=None): """Create a HBInfo instance from an existing sparse matrix. Parameters ---------- m : sparse matrix the HBInfo instance will derive its parameters from m title : str Title to put in the HB header key : str Key mxtype : HBMatrixType type of the input matrix fmt : dict not implemented Returns ------- hb_info : HBInfo instance """ pointer = m.indptr indices = m.indices values = m.data nrows, ncols = m.shape nnon_zeros = m.nnz if fmt is None: # +1 because HB use one-based indexing (Fortran), and we will write # the indices /pointer as such pointer_fmt = IntFormat.from_number(np.max(pointer+1)) indices_fmt = IntFormat.from_number(np.max(indices+1)) if values.dtype.kind in np.typecodes["AllFloat"]: values_fmt = ExpFormat.from_number(-np.max(np.abs(values))) elif values.dtype.kind in np.typecodes["AllInteger"]: values_fmt = IntFormat.from_number(-np.max(np.abs(values))) else: raise NotImplementedError("type %s not implemented yet" % values.dtype.kind) else: raise NotImplementedError("fmt argument not supported yet.") if mxtype is None: if not np.isrealobj(values): raise ValueError("Complex values not supported yet") if values.dtype.kind in np.typecodes["AllInteger"]: tp = "integer" elif values.dtype.kind in np.typecodes["AllFloat"]: tp = "real" else: raise NotImplementedError("type %s for values not implemented" % values.dtype) mxtype = HBMatrixType(tp, "unsymmetric", "assembled") else: raise ValueError("mxtype argument not handled yet.") def _nlines(fmt, size): nlines = size // fmt.repeat if nlines * fmt.repeat != size: nlines += 1 return nlines pointer_nlines = _nlines(pointer_fmt, pointer.size) indices_nlines = _nlines(indices_fmt, indices.size) values_nlines = _nlines(values_fmt, values.size) total_nlines = pointer_nlines + indices_nlines + values_nlines return cls(title, key, total_nlines, pointer_nlines, indices_nlines, values_nlines, mxtype, nrows, ncols, nnon_zeros, pointer_fmt.fortran_format, indices_fmt.fortran_format, values_fmt.fortran_format) @classmethod def from_file(cls, fid): """Create a HBInfo instance from a file object containg a matrix in the HB format. 
Parameters ---------- fid : file-like matrix File or file-like object containing a matrix in the HB format. Returns ------- hb_info : HBInfo instance """ # First line line = fid.readline().strip("\n") if not len(line) > 72: raise ValueError("Expected at least 72 characters for first line, " "got: \n%s" % line) title = line[:72] key = line[72:] # Second line line = fid.readline().strip("\n") if not len(line.rstrip()) >= 56: raise ValueError("Expected at least 56 characters for second line, " "got: \n%s" % line) total_nlines = _expect_int(line[:14]) pointer_nlines = _expect_int(line[14:28]) indices_nlines = _expect_int(line[28:42]) values_nlines = _expect_int(line[42:56]) rhs_nlines = line[56:72].strip() if rhs_nlines == '': rhs_nlines = 0 else: rhs_nlines = _expect_int(rhs_nlines) if not rhs_nlines == 0: raise ValueError("Only files without right hand side supported for " "now.") # Third line line = fid.readline().strip("\n") if not len(line) >= 70: raise ValueError("Expected at least 72 character for third line, got:\n" "%s" % line) mxtype_s = line[:3].upper() if not len(mxtype_s) == 3: raise ValueError("mxtype expected to be 3 characters long") mxtype = HBMatrixType.from_fortran(mxtype_s) if mxtype.value_type not in ["real", "integer"]: raise ValueError("Only real or integer matrices supported for " "now (detected %s)" % mxtype) if not mxtype.structure == "unsymmetric": raise ValueError("Only unsymmetric matrices supported for " "now (detected %s)" % mxtype) if not mxtype.storage == "assembled": raise ValueError("Only assembled matrices supported for now") if not line[3:14] == " " * 11: raise ValueError("Malformed data for third line: %s" % line) nrows = _expect_int(line[14:28]) ncols = _expect_int(line[28:42]) nnon_zeros = _expect_int(line[42:56]) nelementals = _expect_int(line[56:70]) if not nelementals == 0: raise ValueError("Unexpected value %d for nltvl (last entry of line 3)" % nelementals) # Fourth line line = fid.readline().strip("\n") ct = line.split() if not len(ct) == 3: raise ValueError("Expected 3 formats, got %s" % ct) return cls(title, key, total_nlines, pointer_nlines, indices_nlines, values_nlines, mxtype, nrows, ncols, nnon_zeros, ct[0], ct[1], ct[2], rhs_nlines, nelementals) def __init__(self, title, key, total_nlines, pointer_nlines, indices_nlines, values_nlines, mxtype, nrows, ncols, nnon_zeros, pointer_format_str, indices_format_str, values_format_str, right_hand_sides_nlines=0, nelementals=0): """Do not use this directly, but the class ctrs (from_* functions).""" self.title = title self.key = key if title is None: title = "No Title" if len(title) > 72: raise ValueError("title cannot be > 72 characters") if key is None: key = "|No Key" if len(key) > 8: warnings.warn("key is > 8 characters (key is %s)" % key, LineOverflow) self.total_nlines = total_nlines self.pointer_nlines = pointer_nlines self.indices_nlines = indices_nlines self.values_nlines = values_nlines parser = FortranFormatParser() pointer_format = parser.parse(pointer_format_str) if not isinstance(pointer_format, IntFormat): raise ValueError("Expected int format for pointer format, got %s" % pointer_format) indices_format = parser.parse(indices_format_str) if not isinstance(indices_format, IntFormat): raise ValueError("Expected int format for indices format, got %s" % indices_format) values_format = parser.parse(values_format_str) if isinstance(values_format, ExpFormat): if mxtype.value_type not in ["real", "complex"]: raise ValueError("Inconsistency between matrix type %s and " "value type %s" % (mxtype, 
values_format)) values_dtype = np.float64 elif isinstance(values_format, IntFormat): if mxtype.value_type not in ["integer"]: raise ValueError("Inconsistency between matrix type %s and " "value type %s" % (mxtype, values_format)) # XXX: fortran int -> dtype association ? values_dtype = np.int else: raise ValueError("Unsupported format for values %r" % (values_format,)) self.pointer_format = pointer_format self.indices_format = indices_format self.values_format = values_format self.pointer_dtype = np.int32 self.indices_dtype = np.int32 self.values_dtype = values_dtype self.pointer_nlines = pointer_nlines self.pointer_nbytes_full = _nbytes_full(pointer_format, pointer_nlines) self.indices_nlines = indices_nlines self.indices_nbytes_full = _nbytes_full(indices_format, indices_nlines) self.values_nlines = values_nlines self.values_nbytes_full = _nbytes_full(values_format, values_nlines) self.nrows = nrows self.ncols = ncols self.nnon_zeros = nnon_zeros self.nelementals = nelementals self.mxtype = mxtype def dump(self): """Gives the header corresponding to this instance as a string.""" header = [self.title.ljust(72) + self.key.ljust(8)] header.append("%14d%14d%14d%14d" % (self.total_nlines, self.pointer_nlines, self.indices_nlines, self.values_nlines)) header.append("%14s%14d%14d%14d%14d" % (self.mxtype.fortran_format.ljust(14), self.nrows, self.ncols, self.nnon_zeros, 0)) pffmt = self.pointer_format.fortran_format iffmt = self.indices_format.fortran_format vffmt = self.values_format.fortran_format header.append("%16s%16s%20s" % (pffmt.ljust(16), iffmt.ljust(16), vffmt.ljust(20))) return "\n".join(header) def _expect_int(value, msg=None): try: return int(value) except ValueError: if msg is None: msg = "Expected an int, got %s" raise ValueError(msg % value) def _read_hb_data(content, header): # XXX: look at a way to reduce memory here (big string creation) ptr_string = "".join([content.read(header.pointer_nbytes_full), content.readline()]) ptr = np.fromstring(ptr_string, dtype=np.int, sep=' ') ind_string = "".join([content.read(header.indices_nbytes_full), content.readline()]) ind = np.fromstring(ind_string, dtype=np.int, sep=' ') val_string = "".join([content.read(header.values_nbytes_full), content.readline()]) val = np.fromstring(val_string, dtype=header.values_dtype, sep=' ') try: return csc_matrix((val, ind-1, ptr-1), shape=(header.nrows, header.ncols)) except ValueError as e: raise e def _write_data(m, fid, header): def write_array(f, ar, nlines, fmt): # ar_nlines is the number of full lines, n is the number of items per # line, ffmt the fortran format pyfmt = fmt.python_format pyfmt_full = pyfmt * fmt.repeat # for each array to write, we first write the full lines, and special # case for partial line full = ar[:(nlines - 1) * fmt.repeat] for row in full.reshape((nlines-1, fmt.repeat)): f.write(pyfmt_full % tuple(row) + "\n") nremain = ar.size - full.size if nremain > 0: f.write((pyfmt * nremain) % tuple(ar[ar.size - nremain:]) + "\n") fid.write(header.dump()) fid.write("\n") # +1 is for fortran one-based indexing write_array(fid, m.indptr+1, header.pointer_nlines, header.pointer_format) write_array(fid, m.indices+1, header.indices_nlines, header.indices_format) write_array(fid, m.data, header.values_nlines, header.values_format) class HBMatrixType(object): """Class to hold the matrix type.""" # q2f* translates qualified names to fortran character _q2f_type = { "real": "R", "complex": "C", "pattern": "P", "integer": "I", } _q2f_structure = { "symmetric": "S", "unsymmetric": "U", 
"hermitian": "H", "skewsymmetric": "Z", "rectangular": "R" } _q2f_storage = { "assembled": "A", "elemental": "E", } _f2q_type = dict([(j, i) for i, j in _q2f_type.items()]) _f2q_structure = dict([(j, i) for i, j in _q2f_structure.items()]) _f2q_storage = dict([(j, i) for i, j in _q2f_storage.items()]) @classmethod def from_fortran(cls, fmt): if not len(fmt) == 3: raise ValueError("Fortran format for matrix type should be 3 " "characters long") try: value_type = cls._f2q_type[fmt[0]] structure = cls._f2q_structure[fmt[1]] storage = cls._f2q_storage[fmt[2]] return cls(value_type, structure, storage) except KeyError: raise ValueError("Unrecognized format %s" % fmt) def __init__(self, value_type, structure, storage="assembled"): self.value_type = value_type self.structure = structure self.storage = storage if value_type not in self._q2f_type: raise ValueError("Unrecognized type %s" % value_type) if structure not in self._q2f_structure: raise ValueError("Unrecognized structure %s" % structure) if storage not in self._q2f_storage: raise ValueError("Unrecognized storage %s" % storage) @property def fortran_format(self): return self._q2f_type[self.value_type] + \ self._q2f_structure[self.structure] + \ self._q2f_storage[self.storage] def __repr__(self): return "HBMatrixType(%s, %s, %s)" % \ (self.value_type, self.structure, self.storage) class HBFile(object): def __init__(self, file, hb_info=None): """Create a HBFile instance. Parameters ---------- file : file-object StringIO work as well hb_info : HBInfo, optional Should be given as an argument for writing, in which case the file should be writable. """ self._fid = file if hb_info is None: self._hb_info = HBInfo.from_file(file) else: #raise IOError("file %s is not writable, and hb_info " # "was given." % file) self._hb_info = hb_info @property def title(self): return self._hb_info.title @property def key(self): return self._hb_info.key @property def type(self): return self._hb_info.mxtype.value_type @property def structure(self): return self._hb_info.mxtype.structure @property def storage(self): return self._hb_info.mxtype.storage def read_matrix(self): return _read_hb_data(self._fid, self._hb_info) def write_matrix(self, m): return _write_data(m, self._fid, self._hb_info) def hb_read(file): """Read HB-format file. Parameters ---------- file : str-like or file-like If a string-like object, file is the name of the file to read. If a file-like object, the data are read from it. Returns ------- data : scipy.sparse.csc_matrix instance The data read from the HB file as a sparse matrix. Notes ----- At the moment not the full Harwell-Boeing format is supported. Supported features are: - assembled, non-symmetric, real matrices - integer for pointer/indices - exponential format for float values, and int format """ def _get_matrix(fid): hb = HBFile(fid) return hb.read_matrix() if isinstance(file, string_types): fid = open(file) try: return _get_matrix(fid) finally: fid.close() else: return _get_matrix(file) def hb_write(file, m, hb_info=None): """Write HB-format file. Parameters ---------- file : str-like or file-like if a string-like object, file is the name of the file to read. If a file-like object, the data are read from it. m : sparse-matrix the sparse matrix to write hb_info : HBInfo contains the meta-data for write Returns ------- None Notes ----- At the moment not the full Harwell-Boeing format is supported. 
Supported features are: - assembled, non-symmetric, real matrices - integer for pointer/indices - exponential format for float values, and int format """ if hb_info is None: hb_info = HBInfo.from_data(m) def _set_matrix(fid): hb = HBFile(fid, hb_info) return hb.write_matrix(m) if isinstance(file, string_types): fid = open(file, "w") try: return _set_matrix(fid) finally: fid.close() else: return _set_matrix(file)
bsd-3-clause
-529,123,285,604,996,100
32.664845
92
0.571096
false
danielreed/python-hpOneView
hpOneView/resources/networking/logical_downlinks.py
1
5419
# -*- coding: utf-8 -*- ### # (C) Copyright (2012-2016) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. ### from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() __title__ = 'logical-downlinks' __version__ = '0.0.1' __copyright__ = '(C) Copyright (2012-2016) Hewlett Packard Enterprise ' \ ' Development LP' __license__ = 'MIT' __status__ = 'Development' from hpOneView.resources.resource import ResourceClient class LogicalDownlinks(object): URI = '/rest/logical-downlinks' def __init__(self, con): self._connection = con self._client = ResourceClient(con, self.URI) def get_all(self, start=0, count=-1, filter='', sort=''): """ Gets a paginated collection of logical downlinks. The collection is based on optional sorting and filtering, and constrained by start and count parameters. Args: start: The first item to return, using 0-based indexing. If not specified, the default is 0 - start with the first available item. count: The number of resources to return. A count of -1 requests all the items. The actual number of items in the response may differ from the requested count if the sum of start and count exceed the total number of items. filter: A general filter/query string to narrow the list of items returned. The default is no filter - all resources are returned. sort: The sort order of the returned data set. By default, the sort order is based on create time, with the oldest entry first. Returns: list: A list of logical downlinks. """ return self._client.get_all(start, count, filter=filter, sort=sort) def get(self, id_or_uri): """ Gets a logical downlink by ID or by uri. Args: id_or_uri: Could be either the logical downlink id or the logical downlink uri. Returns: dict: The logical downlink. """ return self._client.get(id_or_uri) def get_by(self, field, value): """ Get all logical downlinks that match the filter. The search is case insensitive. Args: field: Field name to filter. value: Value to filter. Returns: list: A list of logical downlinks. """ return self._client.get_by(field, value) def get_all_without_ethernet(self, start=0, count=-1, filter='', sort=''): """ Gets a paginated collection of logical downlinks without ethernet. The collection is based on optional sorting and filtering, and constrained by start and count parameters. Args: start: The first item to return, using 0-based indexing. 
If not specified, the default is 0 - start with the first available item. count: The number of resources to return. A count of -1 requests all the items. The actual number of items in the response may differ from the requested count if the sum of start and count exceed the total number of items. filter: A general filter/query string to narrow the list of items returned. The default is no filter - all resources are returned. sort: The sort order of the returned data set. By default, the sort order is based on create time, with the oldest entry first. Returns: dict """ without_ethernet_client = ResourceClient( self._connection, "/rest/logical-downlinks/withoutEthernet") return without_ethernet_client.get_all(start, count, filter=filter, sort=sort) def get_without_ethernet(self, id): """ Gets the logical downlink with the specified ID without ethernet. Args: id: ID of logical downlink. Returns: dict """ uri = "/rest/logical-downlinks/%s/withoutEthernet" % (id) return self._client.get(uri)
mit
6,708,738,407,590,560,000
37.985612
95
0.640709
false
sholloway/Jitterbug
ext/vendor/glm-0.9.2.3/util/gen_external_templates.py
18
5678
__author__ = "eloraiby" __date__ = "$5-Sep-2010 9:35:29 PM$" atomic_types = ["unsigned char", "unsigned short", "unsigned int", "signed char", "signed short", "signed int", "float", "double"] glsl_vector_types = ["tvec2", "tvec3", "tvec4"] glsl_matrix_types = ["tmat2x2", "tmat2x3", "tmat2x4", "tmat3x2", "tmat3x3", "tmat3x4", "tmat4x2", "tmat4x3", "tmat4x4"] glsl_matrix_member_operators = ["+=", "-=", "*=", "/="] glsl_matrix_out_op_dic = { "tmat2x2":"tmat2x2", "tmat2x3":"tmat3x3", "tmat2x4":"tmat4x4", "tmat3x2":"tmat2x2", "tmat3x3":"tmat3x3", "tmat3x4":"tmat4x4", "tmat4x2":"tmat2x2", "tmat4x3":"tmat3x3", "tmat4x4":"tmat4x4", } glsl_matrix_right_op_dic = { "tmat2x2":"tmat2x2", "tmat2x3":"tmat3x2", "tmat2x4":"tmat4x2", "tmat3x2":"tmat2x3", "tmat3x3":"tmat3x3", "tmat3x4":"tmat4x3", "tmat4x2":"tmat2x4", "tmat4x3":"tmat3x4", "tmat4x4":"tmat4x4", } def gen_vectors(): for v in glsl_vector_types: print print "//" print "// " + v + " type explicit instantiation" print "//" for a in atomic_types: print "template struct " + v + "<" + a + ">;" print def gen_matrices_member_operators(): for m in glsl_matrix_types: print print "//" print "// " + m + " type member operator instantiation" print "//" for a in atomic_types: #print "template " + m + "<" + a + ">::col_type;" #print "template " + m + "<" + a + ">::row_type;" for c in atomic_types: if a != c: print "template " + m + "<" + a + ">::" + m + "(" + m + "<" + c + "> const &m);" """for b in glsl_matrix_member_operators: for cm in atomic_types: print "template " + m + "<" + a + ">& " + m + "<" + a + ">::operator " + b + "( " + m + "<" + cm + "> const &m);" print "template " + m + "<" + a + ">& " + m + "<" + a + ">::operator " + b + "( " + cm + " const &s);" """ print print "//" print "// Binary operators" print "//" print "template " + m + "<" + a + "> operator + (" + m + "<" + a + "> const &m, " + a + " const &s);" if m == "tmat2x2" or m == "tmat3x3" or m == "tmat4x4": print "template " + m + "<" + a + "> operator + (" + a + " const &s, " + m + "<" + a + "> const &m);" print "template " + m + "<" + a + "> operator + (" + m + "<" + a + "> const &m1, " + m + "<" + a + "> const &m2);" print "template " + m + "<" + a + "> operator - (" + m + "<" + a + "> const &m, " + a + " const &s);" if m == "tmat2x2" or m == "tmat3x3" or m == "tmat4x4": print "template " + m + "<" + a + "> operator - (" + a + " const &s, " + m + "<" + a + "> const &m);" print "template " + m + "<" + a + "> operator - (" + m + "<" + a + "> const &m1, " + m + "<" + a + "> const &m2);" out_op = glsl_matrix_out_op_dic[m] right_op = glsl_matrix_right_op_dic[m] print "template " + m + "<" + a + "> operator * (" + m + "<" + a + "> const &m, " + a + " const &s);" if m == "tmat2x2" or m == "tmat3x3" or m == "tmat4x4": print "template " + m + "<" + a + "> operator * ( " + a + " const &s, " + m + "<" + a + "> const &m);" print "template " + out_op + "<" + a + "> operator * (" + m + "<" + a + "> const &m1, " + right_op + "<" + a + "> const &m2);" print "template " + m + "<" + a + ">::col_type" + " operator * ( " + m + "<" + a + "> const &m, " + m + "<" + a + ">::row_type" + " const &s);" print "template " + m + "<" + a + ">::row_type" + " operator * ( " + m + "<" + a + ">::col_type const &s, " + m + "<" + a + "> const &m);" print "template " + m + "<" + a + "> operator / (" + m + "<" + a + "> const &m, " + a + " const &s);" #print "template " + right_op + "<" + a + "> operator / ( " + a + " const &s, " + m + "<" + a + "> const &m);" if m == "tmat2x2" or m == "tmat3x3" or m == "tmat4x4": print 
"template " + m + "<" + a + "> operator / ( " + a + " const &s, " + m + "<" + a + "> const &m);" #print "template " + m + "<" + a + "> operator / (" + m + "<" + a + "> const &m1, " + m + "<" + a + "> const &m2);" else: print "template " + m + "<" + a + "> operator / ( " + a + " const &s, " + m + "<" + a + "> const &m);" #print "template " + m + "<" + a + ">" + " operator / ( " + m + "<" + a + "> const &m, " + a + " const &s);" #print "template " + m + "<" + a + ">" + " operator / ( " + a + " const &s, " + m + "<" + a + "> const &m);" print print "//" print "// Unary constant operators" print "//" print "template " + m + "<" + a + "> const operator -(" + m + "<" + a + "> const &m);" print "template " + m + "<" + a + "> const operator --(" + m + "<" + a + "> const &m, int);" print "template " + m + "<" + a + "> const operator ++(" + m + "<" + a + "> const &m, int);" print def gen_matrices(): for m in glsl_matrix_types: print print "//" print "// " + m + " type explicit instantiation" print "//" for a in atomic_types: print "template struct " + m + "<" + a + ">;" print if __name__ == "__main__": print "//" print "// GLM External templates generator script version 0.1 for GLM core" print "//" print "// atomic types:", atomic_types print "// GLSL vector types:", glsl_vector_types; print "// GLSL matrix types:", glsl_matrix_types; print "//" print print "#include <glm/glm.hpp>" print print "namespace glm {" print "namespace detail {" gen_vectors() gen_matrices() gen_matrices_member_operators() print "} // namespace detail" print "} // namespace glm"
mit
-8,502,481,359,433,495,000
36.62585
146
0.451567
false
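To make the generator's output concrete, the following minimal Python 3 sketch (not part of the original GLM script; instantiation_line is an illustrative helper name) rebuilds the operator* instantiation line for one matrix/scalar pair, with the result and right-operand type names taken straight from glsl_matrix_out_op_dic and glsl_matrix_right_op_dic above.

# Minimal sketch, assuming only the two lookup tables from the script above.
# instantiation_line() is illustrative and does not exist in the original file.
glsl_matrix_out_op_dic = {"tmat2x4": "tmat4x4"}
glsl_matrix_right_op_dic = {"tmat2x4": "tmat4x2"}

def instantiation_line(m, a):
    """Build the explicit instantiation emitted for m<a> * right_op<a>."""
    out_op = glsl_matrix_out_op_dic[m]
    right_op = glsl_matrix_right_op_dic[m]
    return ("template " + out_op + "<" + a + "> operator * (" +
            m + "<" + a + "> const &m1, " + right_op + "<" + a + "> const &m2);")

print(instantiation_line("tmat2x4", "float"))
# template tmat4x4<float> operator * (tmat2x4<float> const &m1, tmat4x2<float> const &m2);

The lookup tables encode ordinary matrix-dimension rules under GLM's column-by-row tmatCxR naming, which is why, for example, tmat2x4 paired with tmat4x2 yields a tmat4x4 result.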
googleapis/python-speech
setup.py
1
2804
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import io
import os

import setuptools

# Package metadata.

name = "google-cloud-speech"
description = "Google Cloud Speech API client library"
version = "2.5.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
    "google-api-core[grpc] >= 1.26.0, < 2.0.0dev",
    "libcst >= 0.2.5",
    "proto-plus >= 1.4.0",
    "packaging >= 14.3",
]
extras = {}

# Setup boilerplate below this line.

package_root = os.path.abspath(os.path.dirname(__file__))

readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()

# Only include packages under the 'google' namespace. Do not include tests,
# benchmarks, etc.
packages = [
    package
    for package in setuptools.PEP420PackageFinder.find()
    if package.startswith("google")
]

# Determine which namespaces are needed.
namespaces = ["google"]
if "google.cloud" in packages:
    namespaces.append("google.cloud")

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="[email protected]",
    license="Apache 2.0",
    url="https://github.com/googleapis/python-speech",
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    namespace_packages=namespaces,
    install_requires=dependencies,
    extras_require=extras,
    python_requires=">=3.6",
    scripts=[
        "scripts/fixup_speech_v1_keywords.py",
        "scripts/fixup_speech_v1p1beta1_keywords.py",
    ],
    include_package_data=True,
    zip_safe=False,
)
apache-2.0
-668,839,975,031,349,000
28.515789
75
0.67903
false
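Because the setup.py above records only packaging metadata, a brief usage sketch of the client library it installs may help orient readers. This is a minimal synchronous-recognition example against the google-cloud-speech 2.x surface declared above; the GCS bucket URI is a placeholder and application-default credentials are assumed to be configured.

# Hedged usage sketch for google-cloud-speech 2.x (the version family above).
# The bucket URI is a placeholder; authentication is assumed to come from
# application-default credentials (e.g. GOOGLE_APPLICATION_CREDENTIALS).
from google.cloud import speech

client = speech.SpeechClient()

audio = speech.RecognitionAudio(uri="gs://example-bucket/audio.flac")
config = speech.RecognitionConfig(
    encoding=speech.RecognitionConfig.AudioEncoding.FLAC,
    sample_rate_hertz=16000,
    language_code="en-US",
)

response = client.recognize(config=config, audio=audio)
for result in response.results:
    # Each result carries one or more alternatives, ordered best first.
    print(result.alternatives[0].transcript)

The 2.x clients also accept a single request={"config": ..., "audio": ...} mapping in place of the keyword arguments shown here.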
asm-products/movie-database-service
ani/lib/python2.7/site-packages/django/db/models/sql/query.py
13
84272
""" Create SQL statements for QuerySets. The code in here encapsulates all of the SQL construction so that QuerySets themselves do not have to (and could be backed by things other than SQL databases). The abstraction barrier only works one way: this module has to know all about the internals of models in order to get the information it needs. """ import copy from django.utils.datastructures import SortedDict from django.utils.encoding import force_text from django.utils.tree import Node from django.utils import six from django.db import connections, DEFAULT_DB_ALIAS from django.db.models.constants import LOOKUP_SEP from django.db.models.aggregates import refs_aggregate from django.db.models.expressions import ExpressionNode from django.db.models.fields import FieldDoesNotExist from django.db.models.related import PathInfo from django.db.models.sql import aggregates as base_aggregates_module from django.db.models.sql.constants import (QUERY_TERMS, ORDER_DIR, SINGLE, ORDER_PATTERN, JoinInfo, SelectInfo) from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin from django.db.models.sql.expressions import SQLEvaluator from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode, ExtraWhere, AND, OR, EmptyWhere) from django.core.exceptions import FieldError __all__ = ['Query', 'RawQuery'] class RawQuery(object): """ A single raw SQL query """ def __init__(self, sql, using, params=None): self.params = params or () self.sql = sql self.using = using self.cursor = None # Mirror some properties of a normal query so that # the compiler can be used to process results. self.low_mark, self.high_mark = 0, None # Used for offset/limit self.extra_select = {} self.aggregate_select = {} def clone(self, using): return RawQuery(self.sql, using, params=self.params) def convert_values(self, value, field, connection): """Convert the database-returned value into a type that is consistent across database backends. By default, this defers to the underlying backend operations, but it can be overridden by Query classes for specific backends. """ return connection.ops.convert_values(value, field) def get_columns(self): if self.cursor is None: self._execute_query() converter = connections[self.using].introspection.table_name_converter return [converter(column_meta[0]) for column_meta in self.cursor.description] def __iter__(self): # Always execute a new query for a new iterator. # This could be optimized with a cache at the expense of RAM. self._execute_query() if not connections[self.using].features.can_use_chunked_reads: # If the database can't use chunked reads we need to make sure we # evaluate the entire query up front. result = list(self.cursor) else: result = self.cursor return iter(result) def __repr__(self): return "<RawQuery: %r>" % (self.sql % tuple(self.params)) def _execute_query(self): self.cursor = connections[self.using].cursor() self.cursor.execute(self.sql, self.params) class Query(object): """ A single SQL query. """ # SQL join types. These are part of the class because their string forms # vary from database to database and can be customised by a subclass. INNER = 'INNER JOIN' LOUTER = 'LEFT OUTER JOIN' alias_prefix = 'T' query_terms = QUERY_TERMS aggregates_module = base_aggregates_module compiler = 'SQLCompiler' def __init__(self, model, where=WhereNode): self.model = model self.alias_refcount = {} # alias_map is the most important data structure regarding joins. # It's used for recording which joins exist in the query and what # type they are. 
The key is the alias of the joined table (possibly # the table name) and the value is JoinInfo from constants.py. self.alias_map = {} self.table_map = {} # Maps table names to list of aliases. self.join_map = {} self.default_cols = True self.default_ordering = True self.standard_ordering = True self.used_aliases = set() self.filter_is_sticky = False self.included_inherited_models = {} # SQL-related attributes # Select and related select clauses as SelectInfo instances. # The select is used for cases where we want to set up the select # clause to contain other than default fields (values(), annotate(), # subqueries...) self.select = [] # The related_select_cols is used for columns needed for # select_related - this is populated in compile stage. self.related_select_cols = [] self.tables = [] # Aliases in the order they are created. self.where = where() self.where_class = where self.group_by = None self.having = where() self.order_by = [] self.low_mark, self.high_mark = 0, None # Used for offset/limit self.distinct = False self.distinct_fields = [] self.select_for_update = False self.select_for_update_nowait = False self.select_related = False # SQL aggregate-related attributes self.aggregates = SortedDict() # Maps alias -> SQL aggregate function self.aggregate_select_mask = None self._aggregate_select_cache = None # Arbitrary maximum limit for select_related. Prevents infinite # recursion. Can be changed by the depth parameter to select_related(). self.max_depth = 5 # These are for extensions. The contents are more or less appended # verbatim to the appropriate clause. self.extra = SortedDict() # Maps col_alias -> (col_sql, params). self.extra_select_mask = None self._extra_select_cache = None self.extra_tables = () self.extra_order_by = () # A tuple that is a set of model field names and either True, if these # are the fields to defer, or False if these are the only fields to # load. self.deferred_loading = (set(), True) def __str__(self): """ Returns the query as a string of SQL with the parameter values substituted in (use sql_with_params() to see the unsubstituted string). Parameter values won't necessarily be quoted correctly, since that is done by the database interface at execution time. """ sql, params = self.sql_with_params() return sql % params def sql_with_params(self): """ Returns the query as an SQL string and the parameters that will be subsituted into the query. """ return self.get_compiler(DEFAULT_DB_ALIAS).as_sql() def __deepcopy__(self, memo): result = self.clone(memo=memo) memo[id(self)] = result return result def prepare(self): return self def get_compiler(self, using=None, connection=None): if using is None and connection is None: raise ValueError("Need either using or connection") if using: connection = connections[using] # Check that the compiler will be able to execute the query for alias, aggregate in self.aggregate_select.items(): connection.ops.check_aggregate_support(aggregate) return connection.ops.compiler(self.compiler)(self, connection, using) def get_meta(self): """ Returns the Options instance (the model._meta) from which to start processing. Normally, this is self.model._meta, but it can be changed by subclasses. """ return self.model._meta def clone(self, klass=None, memo=None, **kwargs): """ Creates a copy of the current instance. The 'kwargs' parameter can be used by clients to update attributes after copying has taken place. 
""" obj = Empty() obj.__class__ = klass or self.__class__ obj.model = self.model obj.alias_refcount = self.alias_refcount.copy() obj.alias_map = self.alias_map.copy() obj.table_map = self.table_map.copy() obj.join_map = self.join_map.copy() obj.default_cols = self.default_cols obj.default_ordering = self.default_ordering obj.standard_ordering = self.standard_ordering obj.included_inherited_models = self.included_inherited_models.copy() obj.select = self.select[:] obj.related_select_cols = [] obj.tables = self.tables[:] obj.where = self.where.clone() obj.where_class = self.where_class if self.group_by is None: obj.group_by = None else: obj.group_by = self.group_by[:] obj.having = self.having.clone() obj.order_by = self.order_by[:] obj.low_mark, obj.high_mark = self.low_mark, self.high_mark obj.distinct = self.distinct obj.distinct_fields = self.distinct_fields[:] obj.select_for_update = self.select_for_update obj.select_for_update_nowait = self.select_for_update_nowait obj.select_related = self.select_related obj.related_select_cols = [] obj.aggregates = self.aggregates.copy() if self.aggregate_select_mask is None: obj.aggregate_select_mask = None else: obj.aggregate_select_mask = self.aggregate_select_mask.copy() # _aggregate_select_cache cannot be copied, as doing so breaks the # (necessary) state in which both aggregates and # _aggregate_select_cache point to the same underlying objects. # It will get re-populated in the cloned queryset the next time it's # used. obj._aggregate_select_cache = None obj.max_depth = self.max_depth obj.extra = self.extra.copy() if self.extra_select_mask is None: obj.extra_select_mask = None else: obj.extra_select_mask = self.extra_select_mask.copy() if self._extra_select_cache is None: obj._extra_select_cache = None else: obj._extra_select_cache = self._extra_select_cache.copy() obj.extra_tables = self.extra_tables obj.extra_order_by = self.extra_order_by obj.deferred_loading = copy.copy(self.deferred_loading[0]), self.deferred_loading[1] if self.filter_is_sticky and self.used_aliases: obj.used_aliases = self.used_aliases.copy() else: obj.used_aliases = set() obj.filter_is_sticky = False obj.__dict__.update(kwargs) if hasattr(obj, '_setup_query'): obj._setup_query() return obj def convert_values(self, value, field, connection): """Convert the database-returned value into a type that is consistent across database backends. By default, this defers to the underlying backend operations, but it can be overridden by Query classes for specific backends. """ return connection.ops.convert_values(value, field) def resolve_aggregate(self, value, aggregate, connection): """Resolve the value of aggregates returned by the database to consistent (and reasonable) types. This is required because of the predisposition of certain backends to return Decimal and long types when they are not needed. """ if value is None: if aggregate.is_ordinal: return 0 # Return None as-is return value elif aggregate.is_ordinal: # Any ordinal aggregate (e.g., count) returns an int return int(value) elif aggregate.is_computed: # Any computed aggregate (e.g., avg) returns a float return float(value) else: # Return value depends on the type of the field being processed. return self.convert_values(value, aggregate.field, connection) def get_aggregation(self, using): """ Returns the dictionary with the values of the existing aggregations. """ if not self.aggregate_select: return {} # If there is a group by clause, aggregating does not add useful # information but retrieves only the first row. 
Aggregate # over the subquery instead. if self.group_by is not None: from django.db.models.sql.subqueries import AggregateQuery query = AggregateQuery(self.model) obj = self.clone() # Remove any aggregates marked for reduction from the subquery # and move them to the outer AggregateQuery. for alias, aggregate in self.aggregate_select.items(): if aggregate.is_summary: query.aggregate_select[alias] = aggregate del obj.aggregate_select[alias] try: query.add_subquery(obj, using) except EmptyResultSet: return dict( (alias, None) for alias in query.aggregate_select ) else: query = self self.select = [] self.default_cols = False self.extra = {} self.remove_inherited_models() query.clear_ordering(True) query.clear_limits() query.select_for_update = False query.select_related = False query.related_select_cols = [] result = query.get_compiler(using).execute_sql(SINGLE) if result is None: result = [None for q in query.aggregate_select.items()] return dict([ (alias, self.resolve_aggregate(val, aggregate, connection=connections[using])) for (alias, aggregate), val in zip(query.aggregate_select.items(), result) ]) def get_count(self, using): """ Performs a COUNT() query using the current filter constraints. """ obj = self.clone() if len(self.select) > 1 or self.aggregate_select or (self.distinct and self.distinct_fields): # If a select clause exists, then the query has already started to # specify the columns that are to be returned. # In this case, we need to use a subquery to evaluate the count. from django.db.models.sql.subqueries import AggregateQuery subquery = obj subquery.clear_ordering(True) subquery.clear_limits() obj = AggregateQuery(obj.model) try: obj.add_subquery(subquery, using=using) except EmptyResultSet: # add_subquery evaluates the query, if it's an EmptyResultSet # then there are can be no results, and therefore there the # count is obviously 0 return 0 obj.add_count_column() number = obj.get_aggregation(using=using)[None] # Apply offset and limit constraints manually, since using LIMIT/OFFSET # in SQL (in variants that provide them) doesn't change the COUNT # output. number = max(0, number - self.low_mark) if self.high_mark is not None: number = min(number, self.high_mark - self.low_mark) return number def has_results(self, using): q = self.clone() q.clear_select_clause() q.clear_ordering(True) q.set_limits(high=1) compiler = q.get_compiler(using=using) return compiler.has_results() def combine(self, rhs, connector): """ Merge the 'rhs' query into the current one (with any 'rhs' effects being applied *after* (that is, "to the right of") anything in the current query. 'rhs' is not modified during a call to this function. The 'connector' parameter describes how to connect filters from the 'rhs' query. """ assert self.model == rhs.model, \ "Cannot combine queries on two different base models." assert self.can_filter(), \ "Cannot combine queries once a slice has been taken." assert self.distinct == rhs.distinct, \ "Cannot combine a unique query with a non-unique query." assert self.distinct_fields == rhs.distinct_fields, \ "Cannot combine queries with different distinct fields." self.remove_inherited_models() # Work out how to relabel the rhs aliases, if necessary. change_map = {} conjunction = (connector == AND) # Determine which existing joins can be reused. When combining the # query with AND we must recreate all joins for m2m filters. When # combining with OR we can reuse joins. 
The reason is that in AND # case a single row can't fulfill a condition like: # revrel__col=1 & revrel__col=2 # But, there might be two different related rows matching this # condition. In OR case a single True is enough, so single row is # enough, too. # # Note that we will be creating duplicate joins for non-m2m joins in # the AND case. The results will be correct but this creates too many # joins. This is something that could be fixed later on. reuse = set() if conjunction else set(self.tables) # Base table must be present in the query - this is the same # table on both sides. self.get_initial_alias() # Now, add the joins from rhs query into the new query (skipping base # table). for alias in rhs.tables[1:]: table, _, join_type, lhs, join_cols, nullable, join_field = rhs.alias_map[alias] promote = (join_type == self.LOUTER) # If the left side of the join was already relabeled, use the # updated alias. lhs = change_map.get(lhs, lhs) new_alias = self.join( (lhs, table, join_cols), reuse=reuse, outer_if_first=not conjunction, nullable=nullable, join_field=join_field) if promote: self.promote_joins([new_alias]) # We can't reuse the same join again in the query. If we have two # distinct joins for the same connection in rhs query, then the # combined query must have two joins, too. reuse.discard(new_alias) change_map[alias] = new_alias if not rhs.alias_refcount[alias]: # The alias was unused in the rhs query. Unref it so that it # will be unused in the new query, too. We have to add and # unref the alias so that join promotion has information of # the join type for the unused alias. self.unref_alias(new_alias) # So that we don't exclude valid results in an OR query combination, # all joins exclusive to either the lhs or the rhs must be converted # to an outer join. RHS joins were already set to outer joins above, # so check which joins were used only in the lhs query. if not conjunction: rhs_used_joins = set(change_map.values()) to_promote = [alias for alias in self.tables if alias not in rhs_used_joins] self.promote_joins(to_promote, True) # Now relabel a copy of the rhs where-clause and add it to the current # one. if rhs.where: w = rhs.where.clone() w.relabel_aliases(change_map) if not self.where: # Since 'self' matches everything, add an explicit "include # everything" where-constraint so that connections between the # where clauses won't exclude valid results. self.where.add(EverythingNode(), AND) elif self.where: # rhs has an empty where clause. w = self.where_class() w.add(EverythingNode(), AND) else: w = self.where_class() self.where.add(w, connector) # Selection columns and extra extensions are those provided by 'rhs'. self.select = [] for col, field in rhs.select: if isinstance(col, (list, tuple)): new_col = change_map.get(col[0], col[0]), col[1] self.select.append(SelectInfo(new_col, field)) else: new_col = col.relabeled_clone(change_map) self.select.append(SelectInfo(new_col, field)) if connector == OR: # It would be nice to be able to handle this, but the queries don't # really make sense (or return consistent value sets). Not worth # the extra complexity when you can write a real query instead. if self.extra and rhs.extra: raise ValueError("When merging querysets using 'or', you " "cannot have extra(select=...) 
on both sides.") self.extra.update(rhs.extra) extra_select_mask = set() if self.extra_select_mask is not None: extra_select_mask.update(self.extra_select_mask) if rhs.extra_select_mask is not None: extra_select_mask.update(rhs.extra_select_mask) if extra_select_mask: self.set_extra_mask(extra_select_mask) self.extra_tables += rhs.extra_tables # Ordering uses the 'rhs' ordering, unless it has none, in which case # the current ordering is used. self.order_by = rhs.order_by[:] if rhs.order_by else self.order_by self.extra_order_by = rhs.extra_order_by or self.extra_order_by def deferred_to_data(self, target, callback): """ Converts the self.deferred_loading data structure to an alternate data structure, describing the field that *will* be loaded. This is used to compute the columns to select from the database and also by the QuerySet class to work out which fields are being initialised on each model. Models that have all their fields included aren't mentioned in the result, only those that have field restrictions in place. The "target" parameter is the instance that is populated (in place). The "callback" is a function that is called whenever a (model, field) pair need to be added to "target". It accepts three parameters: "target", and the model and list of fields being added for that model. """ field_names, defer = self.deferred_loading if not field_names: return orig_opts = self.get_meta() seen = {} must_include = {orig_opts.concrete_model: set([orig_opts.pk])} for field_name in field_names: parts = field_name.split(LOOKUP_SEP) cur_model = self.model opts = orig_opts for name in parts[:-1]: old_model = cur_model source = opts.get_field_by_name(name)[0] if is_reverse_o2o(source): cur_model = source.model else: cur_model = source.rel.to opts = cur_model._meta # Even if we're "just passing through" this model, we must add # both the current model's pk and the related reference field # (if it's not a reverse relation) to the things we select. if not is_reverse_o2o(source): must_include[old_model].add(source) add_to_dict(must_include, cur_model, opts.pk) field, model, _, _ = opts.get_field_by_name(parts[-1]) if model is None: model = cur_model if not is_reverse_o2o(field): add_to_dict(seen, model, field) if defer: # We need to load all fields for each model, except those that # appear in "seen" (for all models that appear in "seen"). The only # slight complexity here is handling fields that exist on parent # models. workset = {} for model, values in six.iteritems(seen): for field, m in model._meta.get_fields_with_model(): if field in values: continue add_to_dict(workset, m or model, field) for model, values in six.iteritems(must_include): # If we haven't included a model in workset, we don't add the # corresponding must_include fields for that model, since an # empty set means "include all fields". That's why there's no # "else" branch here. if model in workset: workset[model].update(values) for model, values in six.iteritems(workset): callback(target, model, values) else: for model, values in six.iteritems(must_include): if model in seen: seen[model].update(values) else: # As we've passed through this model, but not explicitly # included any fields, we have to make sure it's mentioned # so that only the "must include" fields are pulled in. seen[model] = values # Now ensure that every model in the inheritance chain is mentioned # in the parent list. Again, it must be mentioned to ensure that # only "must include" fields are pulled in. 
for model in orig_opts.get_parent_list(): if model not in seen: seen[model] = set() for model, values in six.iteritems(seen): callback(target, model, values) def deferred_to_columns_cb(self, target, model, fields): """ Callback used by deferred_to_columns(). The "target" parameter should be a set instance. """ table = model._meta.db_table if table not in target: target[table] = set() for field in fields: target[table].add(field.column) def table_alias(self, table_name, create=False): """ Returns a table alias for the given table_name and whether this is a new alias or not. If 'create' is true, a new alias is always created. Otherwise, the most recently created alias for the table (if one exists) is reused. """ current = self.table_map.get(table_name) if not create and current: alias = current[0] self.alias_refcount[alias] += 1 return alias, False # Create a new alias for this table. if current: alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1) current.append(alias) else: # The first occurence of a table uses the table name directly. alias = table_name self.table_map[alias] = [alias] self.alias_refcount[alias] = 1 self.tables.append(alias) return alias, True def ref_alias(self, alias): """ Increases the reference count for this alias. """ self.alias_refcount[alias] += 1 def unref_alias(self, alias, amount=1): """ Decreases the reference count for this alias. """ self.alias_refcount[alias] -= amount def promote_joins(self, aliases, unconditional=False): """ Promotes recursively the join type of given aliases and its children to an outer join. If 'unconditional' is False, the join is only promoted if it is nullable or the parent join is an outer join. Note about join promotion: When promoting any alias, we make sure all joins which start from that alias are promoted, too. When adding a join in join(), we make sure any join added to already existing LOUTER join is generated as LOUTER. This ensures we don't ever have broken join chains which contain first a LOUTER join, then an INNER JOIN, that is this kind of join should never be generated: a LOUTER b INNER c. The reason for avoiding this type of join chain is that the INNER after the LOUTER will effectively remove any effect the LOUTER had. """ aliases = list(aliases) while aliases: alias = aliases.pop(0) if self.alias_map[alias].join_cols[0][1] is None: # This is the base table (first FROM entry) - this table # isn't really joined at all in the query, so we should not # alter its join type. continue parent_alias = self.alias_map[alias].lhs_alias parent_louter = (parent_alias and self.alias_map[parent_alias].join_type == self.LOUTER) already_louter = self.alias_map[alias].join_type == self.LOUTER if ((unconditional or self.alias_map[alias].nullable or parent_louter) and not already_louter): data = self.alias_map[alias]._replace(join_type=self.LOUTER) self.alias_map[alias] = data # Join type of 'alias' changed, so re-examine all aliases that # refer to this one. aliases.extend( join for join in self.alias_map.keys() if (self.alias_map[join].lhs_alias == alias and join not in aliases)) def reset_refcounts(self, to_counts): """ This method will reset reference counts for aliases so that they match the value passed in :param to_counts:. 
""" for alias, cur_refcount in self.alias_refcount.copy().items(): unref_amount = cur_refcount - to_counts.get(alias, 0) self.unref_alias(alias, unref_amount) def promote_disjunction(self, aliases_before, alias_usage_counts, num_childs): """ This method is to be used for promoting joins in ORed filters. The principle for promotion is: any alias which is used (it is in alias_usage_counts), is not used by every child of the ORed filter, and isn't pre-existing needs to be promoted to LOUTER join. """ for alias, use_count in alias_usage_counts.items(): if use_count < num_childs and alias not in aliases_before: self.promote_joins([alias]) def change_aliases(self, change_map): """ Changes the aliases in change_map (which maps old-alias -> new-alias), relabelling any references to them in select columns and the where clause. """ assert set(change_map.keys()).intersection(set(change_map.values())) == set() def relabel_column(col): if isinstance(col, (list, tuple)): old_alias = col[0] return (change_map.get(old_alias, old_alias), col[1]) else: return col.relabeled_clone(change_map) # 1. Update references in "select" (normal columns plus aliases), # "group by", "where" and "having". self.where.relabel_aliases(change_map) self.having.relabel_aliases(change_map) if self.group_by: self.group_by = [relabel_column(col) for col in self.group_by] self.select = [SelectInfo(relabel_column(s.col), s.field) for s in self.select] self.aggregates = SortedDict( (key, relabel_column(col)) for key, col in self.aggregates.items()) # 2. Rename the alias in the internal table/alias datastructures. for ident, aliases in self.join_map.items(): del self.join_map[ident] aliases = tuple([change_map.get(a, a) for a in aliases]) ident = (change_map.get(ident[0], ident[0]),) + ident[1:] self.join_map[ident] = aliases for old_alias, new_alias in six.iteritems(change_map): alias_data = self.alias_map[old_alias] alias_data = alias_data._replace(rhs_alias=new_alias) self.alias_refcount[new_alias] = self.alias_refcount[old_alias] del self.alias_refcount[old_alias] self.alias_map[new_alias] = alias_data del self.alias_map[old_alias] table_aliases = self.table_map[alias_data.table_name] for pos, alias in enumerate(table_aliases): if alias == old_alias: table_aliases[pos] = new_alias break for pos, alias in enumerate(self.tables): if alias == old_alias: self.tables[pos] = new_alias break for key, alias in self.included_inherited_models.items(): if alias in change_map: self.included_inherited_models[key] = change_map[alias] # 3. Update any joins that refer to the old alias. for alias, data in six.iteritems(self.alias_map): lhs = data.lhs_alias if lhs in change_map: data = data._replace(lhs_alias=change_map[lhs]) self.alias_map[alias] = data # 4. Update the temporary _lookup_joins list if hasattr(self, '_lookup_joins'): self._lookup_joins = [change_map.get(lj, lj) for lj in self._lookup_joins] def bump_prefix(self, exceptions=()): """ Changes the alias prefix to the next letter in the alphabet and relabels all the aliases. Even tables that previously had no alias will get an alias after this call (it's mostly used for nested queries and the outer query will already be using the non-aliased table name). Subclasses who create their own prefix should override this method to produce a similar result (a new prefix and relabelled aliases). The 'exceptions' parameter is a container that holds alias names which should not be changed. 
""" current = ord(self.alias_prefix) assert current < ord('Z') prefix = chr(current + 1) self.alias_prefix = prefix change_map = SortedDict() for pos, alias in enumerate(self.tables): if alias in exceptions: continue new_alias = '%s%d' % (prefix, pos) change_map[alias] = new_alias self.tables[pos] = new_alias self.change_aliases(change_map) def get_initial_alias(self): """ Returns the first alias for this query, after increasing its reference count. """ if self.tables: alias = self.tables[0] self.ref_alias(alias) else: alias = self.join((None, self.get_meta().db_table, None)) return alias def count_active_tables(self): """ Returns the number of tables in this query with a non-zero reference count. Note that after execution, the reference counts are zeroed, so tables added in compiler will not be seen by this method. """ return len([1 for count in self.alias_refcount.values() if count]) def join(self, connection, reuse=None, outer_if_first=False, nullable=False, join_field=None): """ Returns an alias for the join in 'connection', either reusing an existing alias for that join or creating a new one. 'connection' is a tuple (lhs, table, join_cols) where 'lhs' is either an existing table alias or a table name. 'join_cols' is a tuple of tuples containing columns to join on ((l_id1, r_id1), (l_id2, r_id2)). The join corresponds to the SQL equivalent of:: lhs.l_id1 = table.r_id1 AND lhs.l_id2 = table.r_id2 The 'reuse' parameter can be either None which means all joins (matching the connection) are reusable, or it can be a set containing the aliases that can be reused. If 'outer_if_first' is True and a new join is created, it will have the LOUTER join type. A join is always created as LOUTER if the lhs alias is LOUTER to make sure we do not generate chains like t1 LOUTER t2 INNER t3. If 'nullable' is True, the join can potentially involve NULL values and is a candidate for promotion (to "left outer") when combining querysets. The 'join_field' is the field we are joining along (if any). """ lhs, table, join_cols = connection assert lhs is None or join_field is not None existing = self.join_map.get(connection, ()) if reuse is None: reuse = existing else: reuse = [a for a in existing if a in reuse] for alias in reuse: if join_field and self.alias_map[alias].join_field != join_field: # The join_map doesn't contain join_field (mainly because # fields in Query structs are problematic in pickling), so # check that the existing join is created using the same # join_field used for the under work join. continue self.ref_alias(alias) return alias # No reuse is possible, so we need a new alias. alias, _ = self.table_alias(table, True) if not lhs: # Not all tables need to be joined to anything. No join type # means the later columns are ignored. join_type = None elif outer_if_first or self.alias_map[lhs].join_type == self.LOUTER: # We need to use LOUTER join if asked by outer_if_first or if the # LHS table is left-joined in the query. join_type = self.LOUTER else: join_type = self.INNER join = JoinInfo(table, alias, join_type, lhs, join_cols or ((None, None),), nullable, join_field) self.alias_map[alias] = join if connection in self.join_map: self.join_map[connection] += (alias,) else: self.join_map[connection] = (alias,) return alias def setup_inherited_models(self): """ If the model that is the basis for this QuerySet inherits other models, we need to ensure that those other models have their tables included in the query. 
We do this as a separate step so that subclasses know which tables are going to be active in the query, without needing to compute all the select columns (this method is called from pre_sql_setup(), whereas column determination is a later part, and side-effect, of as_sql()). """ opts = self.get_meta() root_alias = self.tables[0] seen = {None: root_alias} for field, model in opts.get_fields_with_model(): if model not in seen: self.join_parent_model(opts, model, root_alias, seen) self.included_inherited_models = seen def join_parent_model(self, opts, model, alias, seen): """ Makes sure the given 'model' is joined in the query. If 'model' isn't a parent of 'opts' or if it is None this method is a no-op. The 'alias' is the root alias for starting the join, 'seen' is a dict of model -> alias of existing joins. It must also contain a mapping of None -> some alias. This will be returned in the no-op case. """ if model in seen: return seen[model] chain = opts.get_base_chain(model) if chain is None: return alias curr_opts = opts for int_model in chain: if int_model in seen: return seen[int_model] # Proxy model have elements in base chain # with no parents, assign the new options # object and skip to the next base in that # case if not curr_opts.parents[int_model]: curr_opts = int_model._meta continue link_field = curr_opts.get_ancestor_link(int_model) _, _, _, joins, _ = self.setup_joins( [link_field.name], curr_opts, alias) curr_opts = int_model._meta alias = seen[int_model] = joins[-1] return alias or seen[None] def remove_inherited_models(self): """ Undoes the effects of setup_inherited_models(). Should be called whenever select columns (self.select) are set explicitly. """ for key, alias in self.included_inherited_models.items(): if key: self.unref_alias(alias) self.included_inherited_models = {} def add_aggregate(self, aggregate, model, alias, is_summary): """ Adds a single aggregate expression to the Query """ opts = model._meta field_list = aggregate.lookup.split(LOOKUP_SEP) if len(field_list) == 1 and aggregate.lookup in self.aggregates: # Aggregate is over an annotation field_name = field_list[0] col = field_name source = self.aggregates[field_name] if not is_summary: raise FieldError("Cannot compute %s('%s'): '%s' is an aggregate" % ( aggregate.name, field_name, field_name)) elif ((len(field_list) > 1) or (field_list[0] not in [i.name for i in opts.fields]) or self.group_by is None or not is_summary): # If: # - the field descriptor has more than one part (foo__bar), or # - the field descriptor is referencing an m2m/m2o field, or # - this is a reference to a model field (possibly inherited), or # - this is an annotation over a model field # then we need to explore the joins that are required. field, sources, opts, join_list, path = self.setup_joins( field_list, opts, self.get_initial_alias()) # Process the join chain to see if it can be trimmed targets, _, join_list = self.trim_joins(sources, join_list, path) # If the aggregate references a model or field that requires a join, # those joins must be LEFT OUTER - empty join rows must be returned # in order for zeros to be returned for those aggregates. self.promote_joins(join_list, True) col = targets[0].column source = sources[0] col = (join_list[-1], col) else: # The simplest cases. No joins required - # just reference the provided column alias. 
field_name = field_list[0] source = opts.get_field(field_name) col = field_name # Add the aggregate to the query aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary) def build_filter(self, filter_expr, branch_negated=False, current_negated=False, can_reuse=None): """ Builds a WhereNode for a single filter clause, but doesn't add it to this Query. Query.add_q() will then add this filter to the where or having Node. The 'branch_negated' tells us if the current branch contains any negations. This will be used to determine if subqueries are needed. The 'current_negated' is used to determine if the current filter is negated or not and this will be used to determine if IS NULL filtering is needed. The difference between current_netageted and branch_negated is that branch_negated is set on first negation, but current_negated is flipped for each negation. Note that add_filter will not do any negating itself, that is done upper in the code by add_q(). The 'can_reuse' is a set of reusable joins for multijoins. The method will create a filter clause that can be added to the current query. However, if the filter isn't added to the query then the caller is responsible for unreffing the joins used. """ arg, value = filter_expr parts = arg.split(LOOKUP_SEP) if not parts: raise FieldError("Cannot parse keyword query %r" % arg) # Work out the lookup type and remove it from the end of 'parts', # if necessary. lookup_type = 'exact' # Default lookup type num_parts = len(parts) if (len(parts) > 1 and parts[-1] in self.query_terms and arg not in self.aggregates): # Traverse the lookup query to distinguish related fields from # lookup types. lookup_model = self.model for counter, field_name in enumerate(parts): try: lookup_field = lookup_model._meta.get_field(field_name) except FieldDoesNotExist: # Not a field. Bail out. lookup_type = parts.pop() break # Unless we're at the end of the list of lookups, let's attempt # to continue traversing relations. if (counter + 1) < num_parts: try: lookup_model = lookup_field.rel.to except AttributeError: # Not a related field. Bail out. lookup_type = parts.pop() break clause = self.where_class() # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all # uses of None as a query value. if value is None: if lookup_type != 'exact': raise ValueError("Cannot use None as a query value") lookup_type = 'isnull' value = True elif callable(value): value = value() elif isinstance(value, ExpressionNode): # If value is a query expression, evaluate it value = SQLEvaluator(value, self, reuse=can_reuse) # For Oracle '' is equivalent to null. The check needs to be done # at this stage because join promotion can't be done at compiler # stage. Using DEFAULT_DB_ALIAS isn't nice, but it is the best we # can do here. Similar thing is done in is_nullable(), too. 
if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and lookup_type == 'exact' and value == ''): value = True lookup_type = 'isnull' for alias, aggregate in self.aggregates.items(): if alias in (parts[0], LOOKUP_SEP.join(parts)): clause.add((aggregate, lookup_type, value), AND) return clause opts = self.get_meta() alias = self.get_initial_alias() allow_many = not branch_negated try: field, sources, opts, join_list, path = self.setup_joins( parts, opts, alias, can_reuse, allow_many, allow_explicit_fk=True) if can_reuse is not None: can_reuse.update(join_list) # split_exclude() needs to know which joins were generated for the # lookup parts self._lookup_joins = join_list except MultiJoin as e: return self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]), can_reuse, e.names_with_path) if (lookup_type == 'isnull' and value is True and not current_negated and len(join_list) > 1): # If the comparison is against NULL, we may need to use some left # outer joins when creating the join chain. This is only done when # needed, as it's less efficient at the database level. self.promote_joins(join_list) # Process the join list to see if we can remove any inner joins from # the far end (fewer tables in a query is better). Note that join # promotion must happen before join trimming to have the join type # information available when reusing joins. targets, alias, join_list = self.trim_joins(sources, join_list, path) if hasattr(field, 'get_lookup_constraint'): constraint = field.get_lookup_constraint(self.where_class, alias, targets, sources, lookup_type, value) else: constraint = (Constraint(alias, targets[0].column, field), lookup_type, value) clause.add(constraint, AND) if current_negated and (lookup_type != 'isnull' or value is False): self.promote_joins(join_list) if (lookup_type != 'isnull' and ( self.is_nullable(targets[0]) or self.alias_map[join_list[-1]].join_type == self.LOUTER)): # The condition added here will be SQL like this: # NOT (col IS NOT NULL), where the first NOT is added in # upper layers of code. The reason for addition is that if col # is null, then col != someval will result in SQL "unknown" # which isn't the same as in Python. The Python None handling # is wanted, and it can be gotten by # (col IS NULL OR col != someval) # <=> # NOT (col IS NOT NULL AND col = someval). clause.add((Constraint(alias, targets[0].column, None), 'isnull', False), AND) return clause def add_filter(self, filter_clause): self.where.add(self.build_filter(filter_clause), 'AND') def need_having(self, obj): """ Returns whether or not all elements of this q_object need to be put together in the HAVING clause. """ if not isinstance(obj, Node): return (refs_aggregate(obj[0].split(LOOKUP_SEP), self.aggregates) or (hasattr(obj[1], 'contains_aggregate') and obj[1].contains_aggregate(self.aggregates))) return any(self.need_having(c) for c in obj.children) def split_having_parts(self, q_object, negated=False): """ Returns a list of q_objects which need to go into the having clause instead of the where clause. Removes the splitted out nodes from the given q_object. Note that the q_object is altered, so cloning it is needed. """ having_parts = [] for c in q_object.children[:]: # When constucting the having nodes we need to take care to # preserve the negation status from the upper parts of the tree if isinstance(c, Node): # For each negated child, flip the in_negated flag. 
in_negated = c.negated ^ negated if c.connector == OR and self.need_having(c): # A subtree starting from OR clause must go into having in # whole if any part of that tree references an aggregate. q_object.children.remove(c) having_parts.append(c) c.negated = in_negated else: having_parts.extend( self.split_having_parts(c, in_negated)[1]) elif self.need_having(c): q_object.children.remove(c) new_q = self.where_class(children=[c], negated=negated) having_parts.append(new_q) return q_object, having_parts def add_q(self, q_object): """ A preprocessor for the internal _add_q(). Responsible for splitting the given q_object into where and having parts and setting up some internal variables. """ if not self.need_having(q_object): where_part, having_parts = q_object, [] else: where_part, having_parts = self.split_having_parts( q_object.clone(), q_object.negated) used_aliases = self.used_aliases clause = self._add_q(where_part, used_aliases) self.where.add(clause, AND) for hp in having_parts: clause = self._add_q(hp, used_aliases) self.having.add(clause, AND) if self.filter_is_sticky: self.used_aliases = used_aliases def _add_q(self, q_object, used_aliases, branch_negated=False, current_negated=False): """ Adds a Q-object to the current filter. """ connector = q_object.connector current_negated = current_negated ^ q_object.negated branch_negated = branch_negated or q_object.negated target_clause = self.where_class(connector=connector, negated=q_object.negated) # Treat case NOT (a AND b) like case ((NOT a) OR (NOT b)) for join # promotion. See ticket #21748. effective_connector = connector if current_negated: effective_connector = OR if effective_connector == AND else AND if effective_connector == OR: alias_usage_counts = dict() aliases_before = set(self.tables) for child in q_object.children: if effective_connector == OR: refcounts_before = self.alias_refcount.copy() if isinstance(child, Node): child_clause = self._add_q( child, used_aliases, branch_negated, current_negated) else: child_clause = self.build_filter( child, can_reuse=used_aliases, branch_negated=branch_negated, current_negated=current_negated) target_clause.add(child_clause, connector) if effective_connector == OR: used = alias_diff(refcounts_before, self.alias_refcount) for alias in used: alias_usage_counts[alias] = alias_usage_counts.get(alias, 0) + 1 if effective_connector == OR: self.promote_disjunction(aliases_before, alias_usage_counts, len(q_object.children)) return target_clause def names_to_path(self, names, opts, allow_many, allow_explicit_fk): """ Walks the names path and turns them PathInfo tuples. Note that a single name in 'names' can generate multiple PathInfos (m2m for example). 'names' is the path of names to travle, 'opts' is the model Options we start the name resolving from, 'allow_many' and 'allow_explicit_fk' are as for setup_joins(). Returns a list of PathInfo tuples. In addition returns the final field (the last used join field), and target (which is a field guaranteed to contain the same value as the final field). """ path, names_with_path = [], [] for pos, name in enumerate(names): cur_names_with_path = (name, []) if name == 'pk': name = opts.pk.name try: field, model, direct, m2m = opts.get_field_by_name(name) except FieldDoesNotExist: for f in opts.fields: if allow_explicit_fk and name == f.attname: # XXX: A hack to allow foo_id to work in values() for # backwards compatibility purposes. If we dropped that # feature, this could be removed. 
field, model, direct, m2m = opts.get_field_by_name(f.name) break else: available = opts.get_all_field_names() + list(self.aggregate_select) raise FieldError("Cannot resolve keyword %r into field. " "Choices are: %s" % (name, ", ".join(available))) # Check if we need any joins for concrete inheritance cases (the # field lives in parent, but we are currently in one of its # children) if model: # The field lives on a base class of the current model. # Skip the chain of proxy to the concrete proxied model proxied_model = opts.concrete_model for int_model in opts.get_base_chain(model): if int_model is proxied_model: opts = int_model._meta else: final_field = opts.parents[int_model] targets = (final_field.rel.get_related_field(),) opts = int_model._meta path.append(PathInfo(final_field.model._meta, opts, targets, final_field, False, True)) cur_names_with_path[1].append(PathInfo(final_field.model._meta, opts, targets, final_field, False, True)) if hasattr(field, 'get_path_info'): pathinfos = field.get_path_info() if not allow_many: for inner_pos, p in enumerate(pathinfos): if p.m2m: cur_names_with_path[1].extend(pathinfos[0:inner_pos + 1]) names_with_path.append(cur_names_with_path) raise MultiJoin(pos + 1, names_with_path) last = pathinfos[-1] path.extend(pathinfos) final_field = last.join_field opts = last.to_opts targets = last.target_fields cur_names_with_path[1].extend(pathinfos) names_with_path.append(cur_names_with_path) else: # Local non-relational field. final_field = field targets = (field,) break if pos != len(names) - 1: if pos == len(names) - 2: raise FieldError( "Join on field %r not permitted. Did you misspell %r for " "the lookup type?" % (name, names[pos + 1])) else: raise FieldError("Join on field %r not permitted." % name) return path, final_field, targets def setup_joins(self, names, opts, alias, can_reuse=None, allow_many=True, allow_explicit_fk=False, outer_if_first=False): """ Compute the necessary table joins for the passage through the fields given in 'names'. 'opts' is the Options class for the current model (which gives the table we are starting from), 'alias' is the alias for the table to start the joining from. The 'can_reuse' defines the reverse foreign key joins we can reuse. It can be None in which case all joins are reusable or a set of aliases that can be reused. Note that non-reverse foreign keys are always reusable when using setup_joins(). If 'allow_many' is False, then any reverse foreign key seen will generate a MultiJoin exception. The 'allow_explicit_fk' controls if field.attname is allowed in the lookups. Returns the final field involved in the joins, the target field (used for any 'where' constraint), the final 'opts' value, the joins and the field path travelled to generate the joins. The target field is the field containing the concrete value. Final field can be something different, for example foreign key pointing to that value. Final field is needed for example in some value conversions (convert 'obj' in fk__id=obj to pk val using the foreign key field for example). """ joins = [alias] # First, generate the path for the names path, final_field, targets = self.names_to_path( names, opts, allow_many, allow_explicit_fk) # Then, add the path to the query's joins. Note that we can't trim # joins at this stage - we will need the information about join type # of the trimmed joins. 
for pos, join in enumerate(path): opts = join.to_opts if join.direct: nullable = self.is_nullable(join.join_field) else: nullable = True connection = alias, opts.db_table, join.join_field.get_joining_columns() reuse = can_reuse if join.m2m else None alias = self.join( connection, reuse=reuse, nullable=nullable, join_field=join.join_field, outer_if_first=outer_if_first) joins.append(alias) if hasattr(final_field, 'field'): final_field = final_field.field return final_field, targets, opts, joins, path def trim_joins(self, targets, joins, path): """ The 'target' parameter is the final field being joined to, 'joins' is the full list of join aliases. The 'path' contain the PathInfos used to create the joins. Returns the final target field and table alias and the new active joins. We will always trim any direct join if we have the target column available already in the previous table. Reverse joins can't be trimmed as we don't know if there is anything on the other side of the join. """ for pos, info in enumerate(reversed(path)): if len(joins) == 1 or not info.direct: break join_targets = set(t.column for t in info.join_field.foreign_related_fields) cur_targets = set(t.column for t in targets) if not cur_targets.issubset(join_targets): break targets = tuple(r[0] for r in info.join_field.related_fields if r[1].column in cur_targets) self.unref_alias(joins.pop()) return targets, joins[-1], joins def split_exclude(self, filter_expr, prefix, can_reuse, names_with_path): """ When doing an exclude against any kind of N-to-many relation, we need to use a subquery. This method constructs the nested query, given the original exclude filter (filter_expr) and the portion up to the first N-to-many relation field. As an example we could have original filter ~Q(child__name='foo'). We would get here with filter_expr = child__name, prefix = child and can_reuse is a set of joins usable for filters in the original query. We will turn this into equivalent of: WHERE NOT (pk IN (SELECT parent_id FROM thetable WHERE name = 'foo' AND parent_id IS NOT NULL)) It might be worth it to consider using WHERE NOT EXISTS as that has saner null handling, and is easier for the backend's optimizer to handle. """ # Generate the inner query. query = Query(self.model) query.where.add(query.build_filter(filter_expr), AND) query.bump_prefix() query.clear_ordering(True) # Try to have as simple as possible subquery -> trim leading joins from # the subquery. trimmed_prefix, contains_louter = query.trim_start(names_with_path) query.remove_inherited_models() # Add extra check to make sure the selected field will not be null # since we are adding a IN <subquery> clause. This prevents the # database from tripping over IN (...,NULL,...) selects and returning # nothing if self.is_nullable(query.select[0].field): alias, col = query.select[0].col query.where.add((Constraint(alias, col, query.select[0].field), 'isnull', False), AND) condition = self.build_filter( ('%s__in' % trimmed_prefix, query), current_negated=True, branch_negated=True, can_reuse=can_reuse) if contains_louter: or_null_condition = self.build_filter( ('%s__isnull' % trimmed_prefix, True), current_negated=True, branch_negated=True, can_reuse=can_reuse) condition.add(or_null_condition, OR) # Note that the end result will be: # (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL. # This might look crazy but due to how IN works, this seems to be # correct. If the IS NOT NULL check is removed then outercol NOT # IN will return UNKNOWN. 
If the IS NULL check is removed, then if # outercol IS NULL we will not match the row. return condition def set_empty(self): self.where = EmptyWhere() self.having = EmptyWhere() def is_empty(self): return isinstance(self.where, EmptyWhere) or isinstance(self.having, EmptyWhere) def set_limits(self, low=None, high=None): """ Adjusts the limits on the rows retrieved. We use low/high to set these, as it makes it more Pythonic to read and write. When the SQL query is created, they are converted to the appropriate offset and limit values. Any limits passed in here are applied relative to the existing constraints. So low is added to the current low value and both will be clamped to any existing high value. """ if high is not None: if self.high_mark is not None: self.high_mark = min(self.high_mark, self.low_mark + high) else: self.high_mark = self.low_mark + high if low is not None: if self.high_mark is not None: self.low_mark = min(self.high_mark, self.low_mark + low) else: self.low_mark = self.low_mark + low def clear_limits(self): """ Clears any existing limits. """ self.low_mark, self.high_mark = 0, None def can_filter(self): """ Returns True if adding filters to this instance is still possible. Typically, this means no limits or offsets have been put on the results. """ return not self.low_mark and self.high_mark is None def clear_select_clause(self): """ Removes all fields from SELECT clause. """ self.select = [] self.default_cols = False self.select_related = False self.set_extra_mask(()) self.set_aggregate_mask(()) def clear_select_fields(self): """ Clears the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns. """ self.select = [] def add_distinct_fields(self, *field_names): """ Adds and resolves the given fields to the query's "distinct on" clause. """ self.distinct_fields = field_names self.distinct = True def add_fields(self, field_names, allow_m2m=True): """ Adds the given (model) fields to the select set. The field names are added in the order specified. """ alias = self.get_initial_alias() opts = self.get_meta() try: for name in field_names: field, targets, u2, joins, path = self.setup_joins( name.split(LOOKUP_SEP), opts, alias, None, allow_m2m, allow_explicit_fk=True, outer_if_first=True) # Trim last join if possible targets, final_alias, remaining_joins = self.trim_joins(targets, joins[-2:], path) joins = joins[:-2] + remaining_joins self.promote_joins(joins[1:]) for target in targets: self.select.append(SelectInfo((final_alias, target.column), target)) except MultiJoin: raise FieldError("Invalid field name: '%s'" % name) except FieldError: if LOOKUP_SEP in name: # For lookups spanning over relationships, show the error # from the model on which the lookup failed. raise else: names = sorted(opts.get_all_field_names() + list(self.extra) + list(self.aggregate_select)) raise FieldError("Cannot resolve keyword %r into field. " "Choices are: %s" % (name, ", ".join(names))) self.remove_inherited_models() def add_ordering(self, *ordering): """ Adds items from the 'ordering' sequence to the query's "order by" clause. These items are either field names (not column names) -- possibly with a direction prefix ('-' or '?') -- or ordinals, corresponding to column positions in the 'select' list. If 'ordering' is empty, all ordering is cleared from the query. 
""" errors = [] for item in ordering: if not ORDER_PATTERN.match(item): errors.append(item) if errors: raise FieldError('Invalid order_by arguments: %s' % errors) if ordering: self.order_by.extend(ordering) else: self.default_ordering = False def clear_ordering(self, force_empty): """ Removes any ordering settings. If 'force_empty' is True, there will be no ordering in the resulting query (not even the model's default). """ self.order_by = [] self.extra_order_by = () if force_empty: self.default_ordering = False def set_group_by(self): """ Expands the GROUP BY clause required by the query. This will usually be the set of all non-aggregate fields in the return data. If the database backend supports grouping by the primary key, and the query would be equivalent, the optimization will be made automatically. """ self.group_by = [] for col, _ in self.select: self.group_by.append(col) def add_count_column(self): """ Converts the query to do count(...) or count(distinct(pk)) in order to get its size. """ if not self.distinct: if not self.select: count = self.aggregates_module.Count('*', is_summary=True) else: assert len(self.select) == 1, \ "Cannot add count col with multiple cols in 'select': %r" % self.select count = self.aggregates_module.Count(self.select[0].col) else: opts = self.get_meta() if not self.select: count = self.aggregates_module.Count( (self.join((None, opts.db_table, None)), opts.pk.column), is_summary=True, distinct=True) else: # Because of SQL portability issues, multi-column, distinct # counts need a sub-query -- see get_count() for details. assert len(self.select) == 1, \ "Cannot add count col with multiple cols in 'select'." count = self.aggregates_module.Count(self.select[0].col, distinct=True) # Distinct handling is done in Count(), so don't do it at this # level. self.distinct = False # Set only aggregate to be the count column. # Clear out the select cache to reflect the new unmasked aggregates. self.aggregates = {None: count} self.set_aggregate_mask(None) self.group_by = None def add_select_related(self, fields): """ Sets up the select_related data structure so that we only select certain related models (as opposed to all models, when self.select_related=True). """ field_dict = {} for field in fields: d = field_dict for part in field.split(LOOKUP_SEP): d = d.setdefault(part, {}) self.select_related = field_dict self.related_select_cols = [] def add_extra(self, select, select_params, where, params, tables, order_by): """ Adds data to the various extra_* attributes for user-created additions to the query. """ if select: # We need to pair any placeholder markers in the 'select' # dictionary with their parameters in 'select_params' so that # subsequent updates to the select dictionary also adjust the # parameters appropriately. select_pairs = SortedDict() if select_params: param_iter = iter(select_params) else: param_iter = iter([]) for name, entry in select.items(): entry = force_text(entry) entry_params = [] pos = entry.find("%s") while pos != -1: entry_params.append(next(param_iter)) pos = entry.find("%s", pos + 2) select_pairs[name] = (entry, entry_params) # This is order preserving, since self.extra_select is a SortedDict. self.extra.update(select_pairs) if where or params: self.where.add(ExtraWhere(where, params), AND) if tables: self.extra_tables += tuple(tables) if order_by: self.extra_order_by = order_by def clear_deferred_loading(self): """ Remove any fields from the deferred loading set. 
""" self.deferred_loading = (set(), True) def add_deferred_loading(self, field_names): """ Add the given list of model field names to the set of fields to exclude from loading from the database when automatic column selection is done. The new field names are added to any existing field names that are deferred (or removed from any existing field names that are marked as the only ones for immediate loading). """ # Fields on related models are stored in the literal double-underscore # format, so that we can use a set datastructure. We do the foo__bar # splitting and handling when computing the SQL colum names (as part of # get_columns()). existing, defer = self.deferred_loading if defer: # Add to existing deferred names. self.deferred_loading = existing.union(field_names), True else: # Remove names from the set of any existing "immediate load" names. self.deferred_loading = existing.difference(field_names), False def add_immediate_loading(self, field_names): """ Add the given list of model field names to the set of fields to retrieve when the SQL is executed ("immediate loading" fields). The field names replace any existing immediate loading field names. If there are field names already specified for deferred loading, those names are removed from the new field_names before storing the new names for immediate loading. (That is, immediate loading overrides any existing immediate values, but respects existing deferrals.) """ existing, defer = self.deferred_loading field_names = set(field_names) if 'pk' in field_names: field_names.remove('pk') field_names.add(self.get_meta().pk.name) if defer: # Remove any existing deferred names from the current set before # setting the new names. self.deferred_loading = field_names.difference(existing), False else: # Replace any existing "immediate load" field names. self.deferred_loading = field_names, False def get_loaded_field_names(self): """ If any fields are marked to be deferred, returns a dictionary mapping models to a set of names in those fields that will be loaded. If a model is not in the returned dictionary, none of it's fields are deferred. If no fields are marked for deferral, returns an empty dictionary. """ # We cache this because we call this function multiple times # (compiler.fill_related_selections, query.iterator) try: return self._loaded_field_names_cache except AttributeError: collection = {} self.deferred_to_data(collection, self.get_loaded_field_names_cb) self._loaded_field_names_cache = collection return collection def get_loaded_field_names_cb(self, target, model, fields): """ Callback used by get_deferred_field_names(). """ target[model] = set([f.name for f in fields]) def set_aggregate_mask(self, names): "Set the mask of aggregates that will actually be returned by the SELECT" if names is None: self.aggregate_select_mask = None else: self.aggregate_select_mask = set(names) self._aggregate_select_cache = None def set_extra_mask(self, names): """ Set the mask of extra select items that will be returned by SELECT, we don't actually remove them from the Query since they might be used later """ if names is None: self.extra_select_mask = None else: self.extra_select_mask = set(names) self._extra_select_cache = None def _aggregate_select(self): """The SortedDict of aggregate columns that are not masked, and should be used in the SELECT clause. This result is cached for optimization purposes. 
""" if self._aggregate_select_cache is not None: return self._aggregate_select_cache elif self.aggregate_select_mask is not None: self._aggregate_select_cache = SortedDict([ (k,v) for k,v in self.aggregates.items() if k in self.aggregate_select_mask ]) return self._aggregate_select_cache else: return self.aggregates aggregate_select = property(_aggregate_select) def _extra_select(self): if self._extra_select_cache is not None: return self._extra_select_cache elif self.extra_select_mask is not None: self._extra_select_cache = SortedDict([ (k,v) for k,v in self.extra.items() if k in self.extra_select_mask ]) return self._extra_select_cache else: return self.extra extra_select = property(_extra_select) def trim_start(self, names_with_path): """ Trims joins from the start of the join path. The candidates for trim are the PathInfos in names_with_path structure that are m2m joins. Also sets the select column so the start matches the join. This method is meant to be used for generating the subquery joins & cols in split_exclude(). Returns a lookup usable for doing outerq.filter(lookup=self). Returns also if the joins in the prefix contain a LEFT OUTER join. _""" all_paths = [] for _, paths in names_with_path: all_paths.extend(paths) contains_louter = False # Trim and operate only on tables that were generated for # the lookup part of the query. That is, avoid trimming # joins generated for F() expressions. lookup_tables = [t for t in self.tables if t in self._lookup_joins or t == self.tables[0]] for trimmed_paths, path in enumerate(all_paths): if path.m2m: break if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == self.LOUTER: contains_louter = True self.unref_alias(lookup_tables[trimmed_paths]) # The path.join_field is a Rel, lets get the other side's field join_field = path.join_field.field # Build the filter prefix. paths_in_prefix = trimmed_paths trimmed_prefix = [] for name, path in names_with_path: if paths_in_prefix - len(path) < 0: break trimmed_prefix.append(name) paths_in_prefix -= len(path) trimmed_prefix.append( join_field.foreign_related_fields[0].name) trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix) # Lets still see if we can trim the first join from the inner query # (that is, self). We can't do this for LEFT JOINs because we would # miss those rows that have nothing on the outer side. if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type != self.LOUTER: select_fields = [r[0] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths + 1] self.unref_alias(lookup_tables[trimmed_paths]) extra_restriction = join_field.get_extra_restriction( self.where_class, None, lookup_tables[trimmed_paths + 1]) if extra_restriction: self.where.add(extra_restriction, AND) else: # TODO: It might be possible to trim more joins from the start of the # inner query if it happens to have a longer join chain containing the # values in select_fields. Lets punt this one for now. select_fields = [r[1] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths] self.select = [SelectInfo((select_alias, f.column), f) for f in select_fields] return trimmed_prefix, contains_louter def is_nullable(self, field): """ A helper to check if the given field should be treated as nullable. Some backends treat '' as null and Django treats such fields as nullable for those backends. In such situations field.null can be False even if we should treat the field as nullable. 
""" # We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have # (nor should it have) knowledge of which connection is going to be # used. The proper fix would be to defer all decisions where # is_nullable() is needed to the compiler stage, but that is not easy # to do currently. if ((connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls) and field.empty_strings_allowed): return True else: return field.null def get_order_dir(field, default='ASC'): """ Returns the field name and direction for an order specification. For example, '-foo' is returned as ('foo', 'DESC'). The 'default' param is used to indicate which way no prefix (or a '+' prefix) should sort. The '-' prefix always sorts the opposite way. """ dirn = ORDER_DIR[default] if field[0] == '-': return field[1:], dirn[1] return field, dirn[0] def add_to_dict(data, key, value): """ A helper function to add "value" to the set of values for "key", whether or not "key" already exists. """ if key in data: data[key].add(value) else: data[key] = set([value]) def is_reverse_o2o(field): """ A little helper to check if the given field is reverse-o2o. The field is expected to be some sort of relation field or related object. """ return not hasattr(field, 'rel') and field.field.unique def alias_diff(refcounts_before, refcounts_after): """ Given the before and after copies of refcounts works out which aliases have been added to the after copy. """ # Use -1 as default value so that any join that is created, then trimmed # is seen as added. return set(t for t in refcounts_after if refcounts_after[t] > refcounts_before.get(t, -1))
agpl-3.0
737,084,032,429,890,700
42.664249
129
0.591869
false
tectronics/py-lepton
lepton/pygame_renderer.py
6
3955
############################################################################# # # Copyright (c) 2008 by Casey Duncan and contributors # All Rights Reserved. # # This software is subject to the provisions of the MIT License # A copy of the license should accompany this distribution. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # ############################################################################# """Pygame particle renderers. (Obviously) requires pygame """ __version__ = '$Id$' from pygame.transform import rotozoom from math import degrees class FillRenderer: """Renders particles to a pygame surface using simple fills""" def __init__(self, surface, flags=None): """ surface -- pygame surface to render particles to flags -- Special fill flags (pygame 1.8+ required) """ self.surface = surface self.flags = flags def draw(self, group): fill = self.surface.fill if self.flags is None: for p in group: fill(p.color.clamp(0, 255), (p.position.x, p.position.y, p.size.x, p.size.y)) else: flags = self.flags for p in group: fill(p.color.clamp(0, 255), (p.position.x, p.position.y, p.size.x, p.size.y), flags) class Cache: """Simple, fast, bounded cache that gives approximate MRU behavior""" def __init__(self, max_size, load_factor=0.85): self.max_size = max_size self.max_recent_size = int(max_size * load_factor) self._recent = {} # Recently accessed bucket self._aged = {} # Less recently accessed bucket self.accesses = 0 self.misses = 0 self.adds = 0 self.flips = 0 self.purged = 0 def __getitem__(self, key): #self.accesses += 1 try: try: return self._recent[key] except KeyError: # Promote aged element to "recent" value = self._aged.pop(key) self._recent[key] = value return value except KeyError: #self.misses += 1 raise def __len__(self): return len(self._recent) + len(self._aged) def __contains__(self, key): return key in self._recent or key in self._aged def __setitem__(self, key, value): assert value is not None #self.adds += 1 if key in self._aged: del self._aged[key] if len(self._recent) >= self.max_recent_size: # Flip the cache discarding aged entries #self.flips += 1 #print self.flips, 'cache flips in', self.adds, ' adds. ', #print self.misses, 'misses in', self.accesses, 'accesses (', #print (self.accesses - self.misses) * 100 / self.accesses, '% hit rate) ', #print 'with', self.purged, 'purged' self._aged = self._recent self._recent = {} self._recent[key] = value while self._aged and len(self) > self.max_size: # Over max size, purge aged entries #self.purged += 1 self._aged.popitem() class BlitRenderer: """Renders particles by blitting to a pygame surface""" surf_cache = Cache(200) def __init__(self, surface, particle_surface, rotate_and_scale=False): """ surface -- pygame surface to render particles to particle_surface -- surface blit to draw each particle. rotate_and_scale -- If true, the particles surfaces are rotated and scaled before blitting. 
""" self.surface = surface self.particle_surface = particle_surface self.rotate_and_scale = rotate_and_scale def draw(self, group): blit = self.surface.blit psurface = self.particle_surface if not self.rotate_and_scale: for p in group: blit(psurface, (p.position.x, p.position.y)) else: cache = self.surf_cache surfid = id(psurface) for p in group: size = int(p.size.x) rot = int(p.rotation.x) cachekey = (surfid, size, rot) try: surface = cache[cachekey] except KeyError: scale = p.size.x / psurface.get_width() surface = cache[cachekey] = rotozoom(psurface, rot, scale) blit(surface, (p.position.x, p.position.y))
mit
1,196,420,979,823,259,600
27.65942
78
0.643236
false
takaaptech/sky_engine
build/linux/unbundle/replace_gyp_files.py
40
2929
#!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Replaces gyp files in tree with files from here that make the build use system libraries. """ import optparse import os.path import shutil import sys REPLACEMENTS = { 'use_system_expat': 'third_party/expat/expat.gyp', 'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp', 'use_system_flac': 'third_party/flac/flac.gyp', 'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp', 'use_system_icu': 'third_party/icu/icu.gyp', 'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp', 'use_system_libevent': 'third_party/libevent/libevent.gyp', 'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp', 'use_system_libpng': 'third_party/libpng/libpng.gyp', 'use_system_libusb': 'third_party/libusb/libusb.gyp', 'use_system_libvpx': 'third_party/libvpx/libvpx.gyp', 'use_system_libwebp': 'third_party/libwebp/libwebp.gyp', 'use_system_libxml': 'third_party/libxml/libxml.gyp', 'use_system_libxnvctrl' : 'third_party/libXNVCtrl/libXNVCtrl.gyp', 'use_system_libxslt': 'third_party/libxslt/libxslt.gyp', 'use_system_opus': 'third_party/opus/opus.gyp', 'use_system_protobuf': 'third_party/protobuf/protobuf.gyp', 'use_system_re2': 'third_party/re2/re2.gyp', 'use_system_snappy': 'third_party/snappy/snappy.gyp', 'use_system_speex': 'third_party/speex/speex.gyp', 'use_system_sqlite': 'third_party/sqlite/sqlite.gyp', 'use_system_v8': 'v8/tools/gyp/v8.gyp', 'use_system_zlib': 'third_party/zlib/zlib.gyp', } def DoMain(argv): my_dirname = os.path.dirname(__file__) source_tree_root = os.path.abspath( os.path.join(my_dirname, '..', '..', '..')) parser = optparse.OptionParser() # Accept arguments in gyp command-line syntax, so that the caller can re-use # command-line for this script and gyp. parser.add_option('-D', dest='defines', action='append') parser.add_option('--undo', action='store_true') options, args = parser.parse_args(argv) for flag, path in REPLACEMENTS.items(): if '%s=1' % flag not in options.defines: continue if options.undo: # Restore original file, and also remove the backup. # This is meant to restore the source tree to its original state. os.rename(os.path.join(source_tree_root, path + '.orig'), os.path.join(source_tree_root, path)) else: # Create a backup copy for --undo. shutil.copyfile(os.path.join(source_tree_root, path), os.path.join(source_tree_root, path + '.orig')) # Copy the gyp file from directory of this script to target path. shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)), os.path.join(source_tree_root, path)) return 0 if __name__ == '__main__': sys.exit(DoMain(sys.argv))
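For reference, the script above is driven with the same -D defines as gyp itself, plus an optional --undo. A short sketch of both invocations; the two use_system_* flags come from the REPLACEMENTS table and the script path matches this file's location, while everything else is illustrative:

import subprocess

# Swap in the system zlib and libxml gyp files; the originals are kept next
# to them as *.orig backups.
subprocess.check_call([
    'python', 'build/linux/unbundle/replace_gyp_files.py',
    '-Duse_system_zlib=1', '-Duse_system_libxml=1',
])

# Later, restore the pristine tree from those backups.
subprocess.check_call([
    'python', 'build/linux/unbundle/replace_gyp_files.py',
    '-Duse_system_zlib=1', '-Duse_system_libxml=1', '--undo',
])
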
bsd-3-clause
-8,113,458,690,183,102,000
34.719512
78
0.679754
false
fabiobrandespim/flat-earth
flat_earth.py
1
4983
""" Author: Fabio Brandespim Email: [email protected] Location: Brazil - Goiania Date: 09-19-2016 """ #!C:/Python27_32/python.exe import pygame import math import particle import titulos import email_py #import time from threading import Thread from pygame.locals import * img = pygame.image.load("terra_plana.bmp") if not pygame.font: print 'Attention, no founts found.' if not pygame.mixer: print 'Attention, theres no sound.' pygame.init() vermelho = (255, 0, 0) amarelo = (255, 255, 0) preto = (0, 0, 0) branco2 = (255, 64, 64) branco = (255, 255, 255) azul = (0, 0, 255) verde = (0, 255, 0) pi = 3.141592653 comprimento_ecra = 820 altura_ecra = 820 ecra = pygame.display.set_mode((comprimento_ecra, altura_ecra)) xpos = (comprimento_ecra)/2 ypos = (altura_ecra)/2 raio_circulo = 15 raio = 130 raio2 = 130 #================================= def g2rad(graus): radianos = (graus * pi) / 180; return radianos; #================================ def sun(raio, pontocentral,graus): rad = g2rad(graus); x = (math.cos(rad) * raio) + pontocentral; x = int(x) y = (math.sin(rad) * raio) + pontocentral; y = int(y) return (x, y) #=================================== def moon(raio, pontocentral, graus): rad = g2rad(graus); x = (math.cos(rad) * raio) + pontocentral; x = int(x) y = (math.sin(rad) * raio) + pontocentral; y = int(y) return (x, y) #========================================= def chama_classe_email(subject, mensagem): e = email_py.SendEmail('[email protected]','Fabio123','[email protected]',subject,mensagem) e.sendnow() #circulo = pygame.draw.circle(ecra, amarelo, (410, 410), 100,1) #circulo = pygame.draw.circle(ecra, amarelo, (410, 410), 200,1) #circulo = pygame.draw.circle(ecra, amarelo, (410, 410), 300,1) pygame.display.set_caption('Flat Earth by Fabio Brandespim 03-19-2016 +55 62 91909935') pygame.display.flip() pygame.key.set_repeat(100, 100) graus = 0 graus2 = 0 subindo = True subindo2 = True volta = 0 while True: for event in pygame.event.get(): pass #if event.type == pygame.QUIT: # pygame.quit() # sys.exit() tecla_pressionada = pygame.key.get_pressed() if tecla_pressionada[K_ESCAPE]: break #=================================== graus += 10 if graus > 360: graus = 1 if subindo: if raio < 270: raio += 10 volta = volta + 1 #if volta > 30: # volta = 1 print(volta) else: volta = volta + 1 print(volta) subindo = False else: if raio > 130: raio -= 10 volta = volta + 1 #if volta > 30: # volta = 1 print(volta) else: volta = volta + 1 print(volta) subindo = True x1, y1 = sun(raio, 410, graus) #=================================== graus2 += 9.7055555 if graus2 > 360: graus2 = 1 if subindo2: if raio2 < 270: raio2 += 10 else: subindo2 = False else: if raio2 > 130: raio2 -= 10 else: subindo2 = True x2, y2 = moon(raio2, 410, graus2) #sun_shadow = pygame.draw.circle(ecra, amarelo, (x1, y1), 135,1) sun2 = pygame.draw.circle(ecra, amarelo, (x1, y1), raio_circulo) #moon_shadow = pygame.draw.circle(ecra, branco, (x2, y2), 135,1) moon2 = pygame.draw.circle(ecra, branco, (x2, y2), raio_circulo) pygame.display.flip() #pygame.time.delay(1) #ecra.fill((white)) #Imagem de fundo ecra.blit(img,(0,0)) #Criar Linhas pygame.draw.line(ecra, branco, [410, 0], [410, 820], 1) pygame.draw.line(ecra, branco, [0, 410], [820, 410], 1) #Criar Circulos tropico_capricornio = particle.Particle((410, 410), 270) tropico_capricornio.display() equador = particle.Particle((410, 410), 200) equador.display() tropico_cancer = particle.Particle((410, 410), 130) tropico_cancer.display() polo_norte = particle.Particle((410, 410), 5) polo_norte.display() # 
Display Labels titulo1 = titulos.titulo("South Pole",30) titulo1.display() titulo2 = titulos.titulo("Capricornio",130) titulo2.display() titulo3 = titulos.titulo("Equador",200) titulo3.display() titulo4 = titulos.titulo("Cancer",270) titulo4.display() titulo5 = titulos.titulo("North Pole",395) titulo5.display() titulo6 = titulos.titulo("South Pole",780) titulo6.display() #envia email com thread if (x1==x2) and (y1==y2): print('Eclipse') th = Thread(target=chama_classe_email, args = ('Eclipse no dia: '+str(volta), "dia: "+str(volta),)) th.start()
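As a quick sanity check of the polar-to-cartesian maths used by g2rad(), sun() and moon() above, the sketch below reproduces the conversion with the script's centre point (410) and inner radius (130); note that pygame's y axis grows downwards, so 90 degrees lands below the centre:

import math

def polar(radius, centre, degrees):
    rad = math.radians(degrees)               # same conversion as g2rad()
    x = int(math.cos(rad) * radius + centre)
    y = int(math.sin(rad) * radius + centre)
    return x, y

print(polar(130, 410, 0))      # (540, 410)
print(polar(130, 410, 90))     # (410, 540)  -- below the centre on screen
print(polar(130, 410, 180))    # (280, 410)
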
gpl-3.0
-2,121,083,049,372,370,200
21.963134
110
0.549669
false
Karm/qpid-proton
proton-j/src/main/resources/cmessenger.py
17
5177
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # from org.apache.qpid.proton import Proton from org.apache.qpid.proton.messenger import Messenger, Status from org.apache.qpid.proton import InterruptException, TimeoutException from cerror import * # from proton/messenger.h PN_STATUS_UNKNOWN = 0 PN_STATUS_PENDING = 1 PN_STATUS_ACCEPTED = 2 PN_STATUS_REJECTED = 3 PN_STATUS_RELEASED = 4 PN_STATUS_MODIFIED = 5 PN_STATUS_ABORTED = 6 PN_STATUS_SETTLED = 7 PN_CUMULATIVE = 1 class pn_messenger_wrapper: def __init__(self, impl): self.impl = impl self.error = pn_error(0, None) def pn_messenger(name): if name is None: return pn_messenger_wrapper(Proton.messenger()) else: return pn_messenger_wrapper(Proton.messenger(name)) def pn_messenger_error(m): return m.error def pn_messenger_set_timeout(m, t): m.impl.setTimeout(t) return 0 def pn_messenger_set_blocking(m, b): m.impl.setBlocking(b) return 0 def pn_messenger_set_certificate(m, c): m.impl.setCertificate(c) return 0 def pn_messenger_set_private_key(m, p): m.impl.setPrivateKey(p) return 0 def pn_messenger_set_password(m, p): m.impl.setPassword(p) return 0 def pn_messenger_set_trusted_certificates(m, t): m.impl.setTrustedCertificates(t) return 0 def pn_messenger_set_incoming_window(m, w): m.impl.setIncomingWindow(w) return 0 def pn_messenger_set_outgoing_window(m, w): m.impl.setOutgoingWindow(w) return 0 def pn_messenger_start(m): m.impl.start() return 0 # XXX: ??? 
def pn_messenger_work(m, t): try: if m.impl.work(t): return 1 else: return PN_TIMEOUT except InterruptException, e: return PN_INTR class pn_subscription: def __init__(self): pass def pn_messenger_subscribe(m, source): m.impl.subscribe(source) return pn_subscription() def pn_messenger_route(m, pattern, address): m.impl.route(pattern, address) return 0 def pn_messenger_rewrite(m, pattern, address): m.impl.rewrite(pattern, address) return 0 def pn_messenger_interrupt(m): m.impl.interrupt() return 0 def pn_messenger_buffered(m, t): raise Skipped() from org.apache.qpid.proton.engine import TransportException def pn_messenger_stop(m): m.impl.stop() return 0 def pn_messenger_stopped(m): return m.impl.stopped() def pn_messenger_put(m, msg): msg.pre_encode() m.impl.put(msg.impl) return 0 def pn_messenger_outgoing_tracker(m): return m.impl.outgoingTracker() def pn_messenger_send(m, n): try: m.impl.send(n) return 0 except InterruptException, e: return PN_INTR except TimeoutException, e: return PN_TIMEOUT def pn_messenger_recv(m, n): try: m.impl.recv(n) return 0 except InterruptException, e: return PN_INTR except TimeoutException, e: return PN_TIMEOUT def pn_messenger_receiving(m): return m.impl.receiving() def pn_messenger_incoming(m): return m.impl.incoming() def pn_messenger_outgoing(m): return m.impl.outgoing() def pn_messenger_get(m, msg): mimpl = m.impl.get() if msg: msg.decode(mimpl) return 0 def pn_messenger_incoming_tracker(m): return m.impl.incomingTracker() def pn_messenger_accept(m, tracker, flags): if flags: m.impl.accept(tracker, Messenger.CUMULATIVE) else: m.impl.accept(tracker, 0) return 0 def pn_messenger_reject(m, tracker, flags): if flags: m.impl.reject(tracker, Messenger.CUMULATIVE) else: m.impl.reject(tracker, 0) return 0 def pn_messenger_settle(m, tracker, flags): if flags: m.impl.settle(tracker, Messenger.CUMULATIVE) else: m.impl.settle(tracker, 0) return 0 STATUS_P2J = { PN_STATUS_UNKNOWN: Status.UNKNOWN, PN_STATUS_PENDING: Status.PENDING, PN_STATUS_ACCEPTED: Status.ACCEPTED, PN_STATUS_REJECTED: Status.REJECTED, PN_STATUS_RELEASED: Status.RELEASED, PN_STATUS_MODIFIED: Status.MODIFIED, PN_STATUS_ABORTED: Status.ABORTED, PN_STATUS_SETTLED: Status.SETTLED } STATUS_J2P = { Status.UNKNOWN: PN_STATUS_UNKNOWN, Status.PENDING: PN_STATUS_PENDING, Status.ACCEPTED: PN_STATUS_ACCEPTED, Status.REJECTED: PN_STATUS_REJECTED, Status.RELEASED: PN_STATUS_RELEASED, Status.MODIFIED: PN_STATUS_MODIFIED, Status.ABORTED: PN_STATUS_ABORTED, Status.SETTLED: PN_STATUS_SETTLED } def pn_messenger_status(m, tracker): return STATUS_J2P[m.impl.getStatus(tracker)] def pn_messenger_set_passive(m, passive): raise Skipped() def pn_messenger_selectable(m): raise Skipped()
apache-2.0
914,276,588,698,954,400
22.008889
71
0.72204
false
kumar303/zamboni
mkt/tags/tests/test_models.py
17
2143
from nose.tools import eq_, ok_ import mkt.site.tests from mkt.site.utils import app_factory from mkt.tags.models import attach_tags, Tag from mkt.websites.utils import website_factory class TestTagManager(mkt.site.tests.TestCase): def test_not_blocked(self): """Make sure Tag Manager filters right for not blocked tags.""" tag1 = Tag(tag_text='abc', blocked=False) tag1.save() tag2 = Tag(tag_text='swearword', blocked=True) tag2.save() eq_(Tag.objects.all().count(), 2) eq_(Tag.objects.not_blocked().count(), 1) eq_(Tag.objects.not_blocked()[0], tag1) class TestAttachTags(mkt.site.tests.TestCase): def test_attach_tags_apps(self): tag1 = Tag.objects.create(tag_text='abc', blocked=False) tag2 = Tag.objects.create(tag_text='xyz', blocked=False) tag3 = Tag.objects.create(tag_text='swearword', blocked=True) app1 = app_factory() app1.tags.add(tag1) app1.tags.add(tag2) app1.tags.add(tag3) app2 = app_factory() app2.tags.add(tag2) app2.tags.add(tag3) app3 = app_factory() ok_(not hasattr(app1, 'tags_list')) attach_tags([app3, app2, app1]) eq_(app1.tags_list, ['abc', 'xyz']) eq_(app2.tags_list, ['xyz']) ok_(not hasattr(app3, 'tags_list')) def test_attach_tags_websites(self): tag1 = Tag.objects.create(tag_text='abc', blocked=False) tag2 = Tag.objects.create(tag_text='xyz', blocked=False) tag3 = Tag.objects.create(tag_text='swearword', blocked=True) website1 = website_factory() website1.keywords.add(tag1) website1.keywords.add(tag2) website1.keywords.add(tag3) website2 = website_factory() website2.keywords.add(tag2) website2.keywords.add(tag3) website3 = website_factory() ok_(not hasattr(website1, 'keywords_list')) attach_tags([website3, website2, website1]) eq_(website1.keywords_list, ['abc', 'xyz']) eq_(website2.keywords_list, ['xyz']) ok_(not hasattr(website3, 'keywords_list'))
bsd-3-clause
-1,933,067,569,631,609,600
30.985075
71
0.616892
false
pschmitt/home-assistant
homeassistant/components/keba/__init__.py
16
8405
"""Support for KEBA charging stations.""" import asyncio import logging from keba_kecontact.connection import KebaKeContact import voluptuous as vol from homeassistant.const import CONF_HOST from homeassistant.helpers import discovery import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DOMAIN = "keba" SUPPORTED_COMPONENTS = ["binary_sensor", "sensor", "lock", "notify"] CONF_RFID = "rfid" CONF_FS = "failsafe" CONF_FS_TIMEOUT = "failsafe_timeout" CONF_FS_FALLBACK = "failsafe_fallback" CONF_FS_PERSIST = "failsafe_persist" CONF_FS_INTERVAL = "refresh_interval" MAX_POLLING_INTERVAL = 5 # in seconds MAX_FAST_POLLING_COUNT = 4 CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_RFID, default="00845500"): cv.string, vol.Optional(CONF_FS, default=False): cv.boolean, vol.Optional(CONF_FS_TIMEOUT, default=30): cv.positive_int, vol.Optional(CONF_FS_FALLBACK, default=6): cv.positive_int, vol.Optional(CONF_FS_PERSIST, default=0): cv.positive_int, vol.Optional(CONF_FS_INTERVAL, default=5): cv.positive_int, } ) }, extra=vol.ALLOW_EXTRA, ) _SERVICE_MAP = { "request_data": "async_request_data", "set_energy": "async_set_energy", "set_current": "async_set_current", "authorize": "async_start", "deauthorize": "async_stop", "enable": "async_enable_ev", "disable": "async_disable_ev", "set_failsafe": "async_set_failsafe", } async def async_setup(hass, config): """Check connectivity and version of KEBA charging station.""" host = config[DOMAIN][CONF_HOST] rfid = config[DOMAIN][CONF_RFID] refresh_interval = config[DOMAIN][CONF_FS_INTERVAL] keba = KebaHandler(hass, host, rfid, refresh_interval) hass.data[DOMAIN] = keba # Wait for KebaHandler setup complete (initial values loaded) if not await keba.setup(): _LOGGER.error("Could not find a charging station at %s", host) return False # Set failsafe mode at start up of Home Assistant failsafe = config[DOMAIN][CONF_FS] timeout = config[DOMAIN][CONF_FS_TIMEOUT] if failsafe else 0 fallback = config[DOMAIN][CONF_FS_FALLBACK] if failsafe else 0 persist = config[DOMAIN][CONF_FS_PERSIST] if failsafe else 0 try: hass.loop.create_task(keba.set_failsafe(timeout, fallback, persist)) except ValueError as ex: _LOGGER.warning("Could not set failsafe mode %s", ex) # Register services to hass async def execute_service(call): """Execute a service to KEBA charging station. This must be a member function as we need access to the keba object here. 
""" function_name = _SERVICE_MAP[call.service] function_call = getattr(keba, function_name) await function_call(call.data) for service in _SERVICE_MAP: hass.services.async_register(DOMAIN, service, execute_service) # Load components for domain in SUPPORTED_COMPONENTS: hass.async_create_task( discovery.async_load_platform(hass, domain, DOMAIN, {}, config) ) # Start periodic polling of charging station data keba.start_periodic_request() return True class KebaHandler(KebaKeContact): """Representation of a KEBA charging station connection.""" def __init__(self, hass, host, rfid, refresh_interval): """Initialize charging station connection.""" super().__init__(host, self.hass_callback) self._update_listeners = [] self._hass = hass self.rfid = rfid self.device_name = "keba" # correct device name will be set in setup() self.device_id = "keba_wallbox_" # correct device id will be set in setup() # Ensure at least MAX_POLLING_INTERVAL seconds delay self._refresh_interval = max(MAX_POLLING_INTERVAL, refresh_interval) self._fast_polling_count = MAX_FAST_POLLING_COUNT self._polling_task = None def start_periodic_request(self): """Start periodic data polling.""" self._polling_task = self._hass.loop.create_task(self._periodic_request()) async def _periodic_request(self): """Send periodic update requests.""" await self.request_data() if self._fast_polling_count < MAX_FAST_POLLING_COUNT: self._fast_polling_count += 1 _LOGGER.debug("Periodic data request executed, now wait for 2 seconds") await asyncio.sleep(2) else: _LOGGER.debug( "Periodic data request executed, now wait for %s seconds", self._refresh_interval, ) await asyncio.sleep(self._refresh_interval) _LOGGER.debug("Periodic data request rescheduled") self._polling_task = self._hass.loop.create_task(self._periodic_request()) async def setup(self, loop=None): """Initialize KebaHandler object.""" await super().setup(loop) # Request initial values and extract serial number await self.request_data() if ( self.get_value("Serial") is not None and self.get_value("Product") is not None ): self.device_id = f"keba_wallbox_{self.get_value('Serial')}" self.device_name = self.get_value("Product") return True return False def hass_callback(self, data): """Handle component notification via callback.""" # Inform entities about updated values for listener in self._update_listeners: listener() _LOGGER.debug("Notifying %d listeners", len(self._update_listeners)) def _set_fast_polling(self): _LOGGER.debug("Fast polling enabled") self._fast_polling_count = 0 self._polling_task.cancel() self._polling_task = self._hass.loop.create_task(self._periodic_request()) def add_update_listener(self, listener): """Add a listener for update notifications.""" self._update_listeners.append(listener) # initial data is already loaded, thus update the component listener() async def async_request_data(self, param): """Request new data in async way.""" await self.request_data() _LOGGER.debug("New data from KEBA wallbox requested") async def async_set_energy(self, param): """Set energy target in async way.""" try: energy = param["energy"] await self.set_energy(float(energy)) self._set_fast_polling() except (KeyError, ValueError) as ex: _LOGGER.warning("Energy value is not correct. %s", ex) async def async_set_current(self, param): """Set current maximum in async way.""" try: current = param["current"] await self.set_current(float(current)) # No fast polling as this function might be called regularly except (KeyError, ValueError) as ex: _LOGGER.warning("Current value is not correct. 
%s", ex) async def async_start(self, param=None): """Authorize EV in async way.""" await self.start(self.rfid) self._set_fast_polling() async def async_stop(self, param=None): """De-authorize EV in async way.""" await self.stop(self.rfid) self._set_fast_polling() async def async_enable_ev(self, param=None): """Enable EV in async way.""" await self.enable(True) self._set_fast_polling() async def async_disable_ev(self, param=None): """Disable EV in async way.""" await self.enable(False) self._set_fast_polling() async def async_set_failsafe(self, param=None): """Set failsafe mode in async way.""" try: timeout = param[CONF_FS_TIMEOUT] fallback = param[CONF_FS_FALLBACK] persist = param[CONF_FS_PERSIST] await self.set_failsafe(int(timeout), float(fallback), bool(persist)) self._set_fast_polling() except (KeyError, ValueError) as ex: _LOGGER.warning( "failsafe_timeout, failsafe_fallback and/or " "failsafe_persist value are not correct. %s", ex, )
apache-2.0
-7,524,593,204,434,205,000
34.167364
84
0.6232
false
ludmilamarian/invenio
invenio/base/setuptools/__init__.py
21
1323
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2014 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. import setuptools class InvenioManageCommand(setuptools.Command): """ Setuptools command for running ```bower <command>``` """ description = "run inveniomanage commands." user_options = [ ('manage-command=', 'c', 'inveniomanage command to run.'), ] def initialize_options(self): """ Default values for options """ self.manage_command = None def finalize_options(self): pass def run(self): cmd = ['inveniomanage', self.manage_command] self.spawn(cmd)
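InvenioManageCommand above only becomes usable once it is registered as a setuptools command via cmdclass in a setup.py. A minimal sketch of that wiring; the project name and the inveniomanage sub-command shown are illustrative:

from setuptools import setup
from invenio.base.setuptools import InvenioManageCommand

setup(
    name='example-overlay',     # illustrative project name
    version='0.1',
    cmdclass={'inveniomanage': InvenioManageCommand},
)

# afterwards, for example:
#   python setup.py inveniomanage --manage-command=collect
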
gpl-2.0
8,730,223,333,637,374,000
29.068182
74
0.686319
false
elit3ge/SickRage
sickbeard/notifiers/synologynotifier.py
12
2585
# Author: Nyaran <[email protected]> # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. import os import subprocess import sickbeard from sickbeard import logger from sickbeard import common from sickrage.helper.encoding import ek from sickrage.helper.exceptions import ex class synologyNotifier: def notify_snatch(self, ep_name): if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH: self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_SNATCH]) def notify_download(self, ep_name): if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD: self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_DOWNLOAD]) def notify_subtitle_download(self, ep_name, lang): if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD: self._send_synologyNotifier(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]) def notify_git_update(self, new_version = "??"): if sickbeard.USE_SYNOLOGYNOTIFIER: update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title=common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._send_synologyNotifier(update_text + new_version, title) def _send_synologyNotifier(self, message, title): synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message] logger.log(u"Executing command " + str(synodsmnotify_cmd)) logger.log(u"Absolute path to command: " + ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG) try: p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR) out, err = p.communicate() #@UnusedVariable logger.log(u"Script result: " + str(out), logger.DEBUG) except OSError, e: logger.log(u"Unable to run synodsmnotify: " + ex(e)) notifier = synologyNotifier
gpl-3.0
3,332,266,006,298,008,600
41.377049
117
0.70677
false
AdrianoMaron/kWantera
argparse.py
490
87791
# Author: Steven J. Bethard <[email protected]>. """Command-line parsing library This module is an optparse-inspired command-line parsing library that: - handles both optional and positional arguments - produces highly informative usage messages - supports parsers that dispatch to sub-parsers The following is a simple usage example that sums integers from the command-line and writes the result to a file:: parser = argparse.ArgumentParser( description='sum the integers at the command line') parser.add_argument( 'integers', metavar='int', nargs='+', type=int, help='an integer to be summed') parser.add_argument( '--log', default=sys.stdout, type=argparse.FileType('w'), help='the file where the sum should be written') args = parser.parse_args() args.log.write('%s' % sum(args.integers)) args.log.close() The module contains the following public classes: - ArgumentParser -- The main entry point for command-line parsing. As the example above shows, the add_argument() method is used to populate the parser with actions for optional and positional arguments. Then the parse_args() method is invoked to convert the args at the command-line into an object with attributes. - ArgumentError -- The exception raised by ArgumentParser objects when there are errors with the parser's actions. Errors raised while parsing the command-line are caught by ArgumentParser and emitted as command-line messages. - FileType -- A factory for defining types of files to be created. As the example above shows, instances of FileType are typically passed as the type= argument of add_argument() calls. - Action -- The base class for parser actions. Typically actions are selected by passing strings like 'store_true' or 'append_const' to the action= argument of add_argument(). However, for greater customization of ArgumentParser actions, subclasses of Action may be defined and passed as the action= argument. - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, ArgumentDefaultsHelpFormatter -- Formatter classes which may be passed as the formatter_class= argument to the ArgumentParser constructor. HelpFormatter is the default, RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser not to change the formatting for help text, and ArgumentDefaultsHelpFormatter adds information about argument defaults to the help. All other classes in this module are considered implementation details. (Also note that HelpFormatter and RawDescriptionHelpFormatter are only considered public as object names -- the API of the formatter objects is still considered an implementation detail.) """ __version__ = '1.2.1' __all__ = [ 'ArgumentParser', 'ArgumentError', 'ArgumentTypeError', 'FileType', 'HelpFormatter', 'ArgumentDefaultsHelpFormatter', 'RawDescriptionHelpFormatter', 'RawTextHelpFormatter', 'Namespace', 'Action', 'ONE_OR_MORE', 'OPTIONAL', 'PARSER', 'REMAINDER', 'SUPPRESS', 'ZERO_OR_MORE', ] import copy as _copy import os as _os import re as _re import sys as _sys import textwrap as _textwrap from gettext import gettext as _ try: set except NameError: # for python < 2.4 compatibility (sets module is there since 2.3): from sets import Set as set try: basestring except NameError: basestring = str try: sorted except NameError: # for python < 2.4 compatibility: def sorted(iterable, reverse=False): result = list(iterable) result.sort() if reverse: result.reverse() return result def _callable(obj): return hasattr(obj, '__call__') or hasattr(obj, '__bases__') SUPPRESS = '==SUPPRESS==' OPTIONAL = '?' 
ZERO_OR_MORE = '*' ONE_OR_MORE = '+' PARSER = 'A...' REMAINDER = '...' _UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args' # ============================= # Utility functions and classes # ============================= class _AttributeHolder(object): """Abstract base class that provides __repr__. The __repr__ method returns a string in the format:: ClassName(attr=name, attr=name, ...) The attributes are determined either by a class-level attribute, '_kwarg_names', or by inspecting the instance __dict__. """ def __repr__(self): type_name = type(self).__name__ arg_strings = [] for arg in self._get_args(): arg_strings.append(repr(arg)) for name, value in self._get_kwargs(): arg_strings.append('%s=%r' % (name, value)) return '%s(%s)' % (type_name, ', '.join(arg_strings)) def _get_kwargs(self): return sorted(self.__dict__.items()) def _get_args(self): return [] def _ensure_value(namespace, name, value): if getattr(namespace, name, None) is None: setattr(namespace, name, value) return getattr(namespace, name) # =============== # Formatting Help # =============== class HelpFormatter(object): """Formatter for generating usage messages and argument help strings. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def __init__(self, prog, indent_increment=2, max_help_position=24, width=None): # default setting for width if width is None: try: width = int(_os.environ['COLUMNS']) except (KeyError, ValueError): width = 80 width -= 2 self._prog = prog self._indent_increment = indent_increment self._max_help_position = max_help_position self._width = width self._current_indent = 0 self._level = 0 self._action_max_length = 0 self._root_section = self._Section(self, None) self._current_section = self._root_section self._whitespace_matcher = _re.compile(r'\s+') self._long_break_matcher = _re.compile(r'\n\n\n+') # =============================== # Section and indentation methods # =============================== def _indent(self): self._current_indent += self._indent_increment self._level += 1 def _dedent(self): self._current_indent -= self._indent_increment assert self._current_indent >= 0, 'Indent decreased below 0.' 
self._level -= 1 class _Section(object): def __init__(self, formatter, parent, heading=None): self.formatter = formatter self.parent = parent self.heading = heading self.items = [] def format_help(self): # format the indented section if self.parent is not None: self.formatter._indent() join = self.formatter._join_parts for func, args in self.items: func(*args) item_help = join([func(*args) for func, args in self.items]) if self.parent is not None: self.formatter._dedent() # return nothing if the section was empty if not item_help: return '' # add the heading if the section was non-empty if self.heading is not SUPPRESS and self.heading is not None: current_indent = self.formatter._current_indent heading = '%*s%s:\n' % (current_indent, '', self.heading) else: heading = '' # join the section-initial newline, the heading and the help return join(['\n', heading, item_help, '\n']) def _add_item(self, func, args): self._current_section.items.append((func, args)) # ======================== # Message building methods # ======================== def start_section(self, heading): self._indent() section = self._Section(self, self._current_section, heading) self._add_item(section.format_help, []) self._current_section = section def end_section(self): self._current_section = self._current_section.parent self._dedent() def add_text(self, text): if text is not SUPPRESS and text is not None: self._add_item(self._format_text, [text]) def add_usage(self, usage, actions, groups, prefix=None): if usage is not SUPPRESS: args = usage, actions, groups, prefix self._add_item(self._format_usage, args) def add_argument(self, action): if action.help is not SUPPRESS: # find all invocations get_invocation = self._format_action_invocation invocations = [get_invocation(action)] for subaction in self._iter_indented_subactions(action): invocations.append(get_invocation(subaction)) # update the maximum item length invocation_length = max([len(s) for s in invocations]) action_length = invocation_length + self._current_indent self._action_max_length = max(self._action_max_length, action_length) # add the item to the list self._add_item(self._format_action, [action]) def add_arguments(self, actions): for action in actions: self.add_argument(action) # ======================= # Help-formatting methods # ======================= def format_help(self): help = self._root_section.format_help() if help: help = self._long_break_matcher.sub('\n\n', help) help = help.strip('\n') + '\n' return help def _join_parts(self, part_strings): return ''.join([part for part in part_strings if part and part is not SUPPRESS]) def _format_usage(self, usage, actions, groups, prefix): if prefix is None: prefix = _('usage: ') # if usage is specified, use that if usage is not None: usage = usage % dict(prog=self._prog) # if no optionals or positionals are available, usage is just prog elif usage is None and not actions: usage = '%(prog)s' % dict(prog=self._prog) # if optionals and positionals are available, calculate usage elif usage is None: prog = '%(prog)s' % dict(prog=self._prog) # split optionals from positionals optionals = [] positionals = [] for action in actions: if action.option_strings: optionals.append(action) else: positionals.append(action) # build full usage string format = self._format_actions_usage action_usage = format(optionals + positionals, groups) usage = ' '.join([s for s in [prog, action_usage] if s]) # wrap the usage parts if it's too long text_width = self._width - self._current_indent if len(prefix) + len(usage) > text_width: # 
break usage into wrappable parts part_regexp = r'\(.*?\)+|\[.*?\]+|\S+' opt_usage = format(optionals, groups) pos_usage = format(positionals, groups) opt_parts = _re.findall(part_regexp, opt_usage) pos_parts = _re.findall(part_regexp, pos_usage) assert ' '.join(opt_parts) == opt_usage assert ' '.join(pos_parts) == pos_usage # helper for wrapping lines def get_lines(parts, indent, prefix=None): lines = [] line = [] if prefix is not None: line_len = len(prefix) - 1 else: line_len = len(indent) - 1 for part in parts: if line_len + 1 + len(part) > text_width: lines.append(indent + ' '.join(line)) line = [] line_len = len(indent) - 1 line.append(part) line_len += len(part) + 1 if line: lines.append(indent + ' '.join(line)) if prefix is not None: lines[0] = lines[0][len(indent):] return lines # if prog is short, follow it with optionals or positionals if len(prefix) + len(prog) <= 0.75 * text_width: indent = ' ' * (len(prefix) + len(prog) + 1) if opt_parts: lines = get_lines([prog] + opt_parts, indent, prefix) lines.extend(get_lines(pos_parts, indent)) elif pos_parts: lines = get_lines([prog] + pos_parts, indent, prefix) else: lines = [prog] # if prog is long, put it on its own line else: indent = ' ' * len(prefix) parts = opt_parts + pos_parts lines = get_lines(parts, indent) if len(lines) > 1: lines = [] lines.extend(get_lines(opt_parts, indent)) lines.extend(get_lines(pos_parts, indent)) lines = [prog] + lines # join lines into usage usage = '\n'.join(lines) # prefix with 'usage:' return '%s%s\n\n' % (prefix, usage) def _format_actions_usage(self, actions, groups): # find group indices and identify actions in groups group_actions = set() inserts = {} for group in groups: try: start = actions.index(group._group_actions[0]) except ValueError: continue else: end = start + len(group._group_actions) if actions[start:end] == group._group_actions: for action in group._group_actions: group_actions.add(action) if not group.required: if start in inserts: inserts[start] += ' [' else: inserts[start] = '[' inserts[end] = ']' else: if start in inserts: inserts[start] += ' (' else: inserts[start] = '(' inserts[end] = ')' for i in range(start + 1, end): inserts[i] = '|' # collect all actions format strings parts = [] for i, action in enumerate(actions): # suppressed arguments are marked with None # remove | separators for suppressed arguments if action.help is SUPPRESS: parts.append(None) if inserts.get(i) == '|': inserts.pop(i) elif inserts.get(i + 1) == '|': inserts.pop(i + 1) # produce all arg strings elif not action.option_strings: part = self._format_args(action, action.dest) # if it's in a group, strip the outer [] if action in group_actions: if part[0] == '[' and part[-1] == ']': part = part[1:-1] # add the action string to the list parts.append(part) # produce the first way to invoke the option in brackets else: option_string = action.option_strings[0] # if the Optional doesn't take a value, format is: # -s or --long if action.nargs == 0: part = '%s' % option_string # if the Optional takes a value, format is: # -s ARGS or --long ARGS else: default = action.dest.upper() args_string = self._format_args(action, default) part = '%s %s' % (option_string, args_string) # make it look optional if it's not required or in a group if not action.required and action not in group_actions: part = '[%s]' % part # add the action string to the list parts.append(part) # insert things at the necessary indices for i in sorted(inserts, reverse=True): parts[i:i] = [inserts[i]] # join all the action items with spaces text = 
' '.join([item for item in parts if item is not None]) # clean up separators for mutually exclusive groups open = r'[\[(]' close = r'[\])]' text = _re.sub(r'(%s) ' % open, r'\1', text) text = _re.sub(r' (%s)' % close, r'\1', text) text = _re.sub(r'%s *%s' % (open, close), r'', text) text = _re.sub(r'\(([^|]*)\)', r'\1', text) text = text.strip() # return the text return text def _format_text(self, text): if '%(prog)' in text: text = text % dict(prog=self._prog) text_width = self._width - self._current_indent indent = ' ' * self._current_indent return self._fill_text(text, text_width, indent) + '\n\n' def _format_action(self, action): # determine the required width and the entry label help_position = min(self._action_max_length + 2, self._max_help_position) help_width = self._width - help_position action_width = help_position - self._current_indent - 2 action_header = self._format_action_invocation(action) # ho nelp; start on same line and add a final newline if not action.help: tup = self._current_indent, '', action_header action_header = '%*s%s\n' % tup # short action name; start on the same line and pad two spaces elif len(action_header) <= action_width: tup = self._current_indent, '', action_width, action_header action_header = '%*s%-*s ' % tup indent_first = 0 # long action name; start on the next line else: tup = self._current_indent, '', action_header action_header = '%*s%s\n' % tup indent_first = help_position # collect the pieces of the action help parts = [action_header] # if there was help for the action, add lines of help text if action.help: help_text = self._expand_help(action) help_lines = self._split_lines(help_text, help_width) parts.append('%*s%s\n' % (indent_first, '', help_lines[0])) for line in help_lines[1:]: parts.append('%*s%s\n' % (help_position, '', line)) # or add a newline if the description doesn't end with one elif not action_header.endswith('\n'): parts.append('\n') # if there are any sub-actions, add their help as well for subaction in self._iter_indented_subactions(action): parts.append(self._format_action(subaction)) # return a single string return self._join_parts(parts) def _format_action_invocation(self, action): if not action.option_strings: metavar, = self._metavar_formatter(action, action.dest)(1) return metavar else: parts = [] # if the Optional doesn't take a value, format is: # -s, --long if action.nargs == 0: parts.extend(action.option_strings) # if the Optional takes a value, format is: # -s ARGS, --long ARGS else: default = action.dest.upper() args_string = self._format_args(action, default) for option_string in action.option_strings: parts.append('%s %s' % (option_string, args_string)) return ', '.join(parts) def _metavar_formatter(self, action, default_metavar): if action.metavar is not None: result = action.metavar elif action.choices is not None: choice_strs = [str(choice) for choice in action.choices] result = '{%s}' % ','.join(choice_strs) else: result = default_metavar def format(tuple_size): if isinstance(result, tuple): return result else: return (result, ) * tuple_size return format def _format_args(self, action, default_metavar): get_metavar = self._metavar_formatter(action, default_metavar) if action.nargs is None: result = '%s' % get_metavar(1) elif action.nargs == OPTIONAL: result = '[%s]' % get_metavar(1) elif action.nargs == ZERO_OR_MORE: result = '[%s [%s ...]]' % get_metavar(2) elif action.nargs == ONE_OR_MORE: result = '%s [%s ...]' % get_metavar(2) elif action.nargs == REMAINDER: result = '...' 
elif action.nargs == PARSER: result = '%s ...' % get_metavar(1) else: formats = ['%s' for _ in range(action.nargs)] result = ' '.join(formats) % get_metavar(action.nargs) return result def _expand_help(self, action): params = dict(vars(action), prog=self._prog) for name in list(params): if params[name] is SUPPRESS: del params[name] for name in list(params): if hasattr(params[name], '__name__'): params[name] = params[name].__name__ if params.get('choices') is not None: choices_str = ', '.join([str(c) for c in params['choices']]) params['choices'] = choices_str return self._get_help_string(action) % params def _iter_indented_subactions(self, action): try: get_subactions = action._get_subactions except AttributeError: pass else: self._indent() for subaction in get_subactions(): yield subaction self._dedent() def _split_lines(self, text, width): text = self._whitespace_matcher.sub(' ', text).strip() return _textwrap.wrap(text, width) def _fill_text(self, text, width, indent): text = self._whitespace_matcher.sub(' ', text).strip() return _textwrap.fill(text, width, initial_indent=indent, subsequent_indent=indent) def _get_help_string(self, action): return action.help class RawDescriptionHelpFormatter(HelpFormatter): """Help message formatter which retains any formatting in descriptions. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _fill_text(self, text, width, indent): return ''.join([indent + line for line in text.splitlines(True)]) class RawTextHelpFormatter(RawDescriptionHelpFormatter): """Help message formatter which retains formatting of all help text. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _split_lines(self, text, width): return text.splitlines() class ArgumentDefaultsHelpFormatter(HelpFormatter): """Help message formatter which adds default values to argument help. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _get_help_string(self, action): help = action.help if '%(default)' not in action.help: if action.default is not SUPPRESS: defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] if action.option_strings or action.nargs in defaulting_nargs: help += ' (default: %(default)s)' return help # ===================== # Options and Arguments # ===================== def _get_action_name(argument): if argument is None: return None elif argument.option_strings: return '/'.join(argument.option_strings) elif argument.metavar not in (None, SUPPRESS): return argument.metavar elif argument.dest not in (None, SUPPRESS): return argument.dest else: return None class ArgumentError(Exception): """An error from creating or using an argument (optional or positional). The string value of this exception is the message, augmented with information about the argument that caused it. 
""" def __init__(self, argument, message): self.argument_name = _get_action_name(argument) self.message = message def __str__(self): if self.argument_name is None: format = '%(message)s' else: format = 'argument %(argument_name)s: %(message)s' return format % dict(message=self.message, argument_name=self.argument_name) class ArgumentTypeError(Exception): """An error from trying to convert a command line string to a type.""" pass # ============== # Action classes # ============== class Action(_AttributeHolder): """Information about how to convert command line strings to Python objects. Action objects are used by an ArgumentParser to represent the information needed to parse a single argument from one or more strings from the command line. The keyword arguments to the Action constructor are also all attributes of Action instances. Keyword Arguments: - option_strings -- A list of command-line option strings which should be associated with this action. - dest -- The name of the attribute to hold the created object(s) - nargs -- The number of command-line arguments that should be consumed. By default, one argument will be consumed and a single value will be produced. Other values include: - N (an integer) consumes N arguments (and produces a list) - '?' consumes zero or one arguments - '*' consumes zero or more arguments (and produces a list) - '+' consumes one or more arguments (and produces a list) Note that the difference between the default and nargs=1 is that with the default, a single value will be produced, while with nargs=1, a list containing a single value will be produced. - const -- The value to be produced if the option is specified and the option uses an action that takes no values. - default -- The value to be produced if the option is not specified. - type -- The type which the command-line arguments should be converted to, should be one of 'string', 'int', 'float', 'complex' or a callable object that accepts a single string argument. If None, 'string' is assumed. - choices -- A container of values that should be allowed. If not None, after a command-line argument has been converted to the appropriate type, an exception will be raised if it is not a member of this collection. - required -- True if the action must always be specified at the command line. This is only meaningful for optional command-line arguments. - help -- The help string describing the argument. - metavar -- The name to be used for the option's argument with the help string. If None, the 'dest' value will be used as the name. 
""" def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): self.option_strings = option_strings self.dest = dest self.nargs = nargs self.const = const self.default = default self.type = type self.choices = choices self.required = required self.help = help self.metavar = metavar def _get_kwargs(self): names = [ 'option_strings', 'dest', 'nargs', 'const', 'default', 'type', 'choices', 'help', 'metavar', ] return [(name, getattr(self, name)) for name in names] def __call__(self, parser, namespace, values, option_string=None): raise NotImplementedError(_('.__call__() not defined')) class _StoreAction(Action): def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): if nargs == 0: raise ValueError('nargs for store actions must be > 0; if you ' 'have nothing to store, actions such as store ' 'true or store const may be more appropriate') if const is not None and nargs != OPTIONAL: raise ValueError('nargs must be %r to supply const' % OPTIONAL) super(_StoreAction, self).__init__( option_strings=option_strings, dest=dest, nargs=nargs, const=const, default=default, type=type, choices=choices, required=required, help=help, metavar=metavar) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, values) class _StoreConstAction(Action): def __init__(self, option_strings, dest, const, default=None, required=False, help=None, metavar=None): super(_StoreConstAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, const=const, default=default, required=required, help=help) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, self.const) class _StoreTrueAction(_StoreConstAction): def __init__(self, option_strings, dest, default=False, required=False, help=None): super(_StoreTrueAction, self).__init__( option_strings=option_strings, dest=dest, const=True, default=default, required=required, help=help) class _StoreFalseAction(_StoreConstAction): def __init__(self, option_strings, dest, default=True, required=False, help=None): super(_StoreFalseAction, self).__init__( option_strings=option_strings, dest=dest, const=False, default=default, required=required, help=help) class _AppendAction(Action): def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): if nargs == 0: raise ValueError('nargs for append actions must be > 0; if arg ' 'strings are not supplying the value to append, ' 'the append const action may be more appropriate') if const is not None and nargs != OPTIONAL: raise ValueError('nargs must be %r to supply const' % OPTIONAL) super(_AppendAction, self).__init__( option_strings=option_strings, dest=dest, nargs=nargs, const=const, default=default, type=type, choices=choices, required=required, help=help, metavar=metavar) def __call__(self, parser, namespace, values, option_string=None): items = _copy.copy(_ensure_value(namespace, self.dest, [])) items.append(values) setattr(namespace, self.dest, items) class _AppendConstAction(Action): def __init__(self, option_strings, dest, const, default=None, required=False, help=None, metavar=None): super(_AppendConstAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, const=const, default=default, required=required, help=help, metavar=metavar) def __call__(self, parser, 
namespace, values, option_string=None): items = _copy.copy(_ensure_value(namespace, self.dest, [])) items.append(self.const) setattr(namespace, self.dest, items) class _CountAction(Action): def __init__(self, option_strings, dest, default=None, required=False, help=None): super(_CountAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, default=default, required=required, help=help) def __call__(self, parser, namespace, values, option_string=None): new_count = _ensure_value(namespace, self.dest, 0) + 1 setattr(namespace, self.dest, new_count) class _HelpAction(Action): def __init__(self, option_strings, dest=SUPPRESS, default=SUPPRESS, help=None): super(_HelpAction, self).__init__( option_strings=option_strings, dest=dest, default=default, nargs=0, help=help) def __call__(self, parser, namespace, values, option_string=None): parser.print_help() parser.exit() class _VersionAction(Action): def __init__(self, option_strings, version=None, dest=SUPPRESS, default=SUPPRESS, help="show program's version number and exit"): super(_VersionAction, self).__init__( option_strings=option_strings, dest=dest, default=default, nargs=0, help=help) self.version = version def __call__(self, parser, namespace, values, option_string=None): version = self.version if version is None: version = parser.version formatter = parser._get_formatter() formatter.add_text(version) parser.exit(message=formatter.format_help()) class _SubParsersAction(Action): class _ChoicesPseudoAction(Action): def __init__(self, name, help): sup = super(_SubParsersAction._ChoicesPseudoAction, self) sup.__init__(option_strings=[], dest=name, help=help) def __init__(self, option_strings, prog, parser_class, dest=SUPPRESS, help=None, metavar=None): self._prog_prefix = prog self._parser_class = parser_class self._name_parser_map = {} self._choices_actions = [] super(_SubParsersAction, self).__init__( option_strings=option_strings, dest=dest, nargs=PARSER, choices=self._name_parser_map, help=help, metavar=metavar) def add_parser(self, name, **kwargs): # set prog from the existing prefix if kwargs.get('prog') is None: kwargs['prog'] = '%s %s' % (self._prog_prefix, name) # create a pseudo-action to hold the choice help if 'help' in kwargs: help = kwargs.pop('help') choice_action = self._ChoicesPseudoAction(name, help) self._choices_actions.append(choice_action) # create the parser and add it to the map parser = self._parser_class(**kwargs) self._name_parser_map[name] = parser return parser def _get_subactions(self): return self._choices_actions def __call__(self, parser, namespace, values, option_string=None): parser_name = values[0] arg_strings = values[1:] # set the parser name if requested if self.dest is not SUPPRESS: setattr(namespace, self.dest, parser_name) # select the parser try: parser = self._name_parser_map[parser_name] except KeyError: tup = parser_name, ', '.join(self._name_parser_map) msg = _('unknown parser %r (choices: %s)' % tup) raise ArgumentError(self, msg) # parse all the remaining options into the namespace # store any unrecognized options on the object, so that the top # level parser can decide what to do with them namespace, arg_strings = parser.parse_known_args(arg_strings, namespace) if arg_strings: vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, []) getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings) # ============== # Type classes # ============== class FileType(object): """Factory for creating file object types Instances of FileType are typically passed as type= arguments to the 
ArgumentParser add_argument() method. Keyword Arguments: - mode -- A string indicating how the file is to be opened. Accepts the same values as the builtin open() function. - bufsize -- The file's desired buffer size. Accepts the same values as the builtin open() function. """ def __init__(self, mode='r', bufsize=None): self._mode = mode self._bufsize = bufsize def __call__(self, string): # the special argument "-" means sys.std{in,out} if string == '-': if 'r' in self._mode: return _sys.stdin elif 'w' in self._mode: return _sys.stdout else: msg = _('argument "-" with mode %r' % self._mode) raise ValueError(msg) # all other arguments are used as file names if self._bufsize: return open(string, self._mode, self._bufsize) else: return open(string, self._mode) def __repr__(self): args = [self._mode, self._bufsize] args_str = ', '.join([repr(arg) for arg in args if arg is not None]) return '%s(%s)' % (type(self).__name__, args_str) # =========================== # Optional and Positional Parsing # =========================== class Namespace(_AttributeHolder): """Simple object for storing attributes. Implements equality by attribute names and values, and provides a simple string representation. """ def __init__(self, **kwargs): for name in kwargs: setattr(self, name, kwargs[name]) __hash__ = None def __eq__(self, other): return vars(self) == vars(other) def __ne__(self, other): return not (self == other) def __contains__(self, key): return key in self.__dict__ class _ActionsContainer(object): def __init__(self, description, prefix_chars, argument_default, conflict_handler): super(_ActionsContainer, self).__init__() self.description = description self.argument_default = argument_default self.prefix_chars = prefix_chars self.conflict_handler = conflict_handler # set up registries self._registries = {} # register actions self.register('action', None, _StoreAction) self.register('action', 'store', _StoreAction) self.register('action', 'store_const', _StoreConstAction) self.register('action', 'store_true', _StoreTrueAction) self.register('action', 'store_false', _StoreFalseAction) self.register('action', 'append', _AppendAction) self.register('action', 'append_const', _AppendConstAction) self.register('action', 'count', _CountAction) self.register('action', 'help', _HelpAction) self.register('action', 'version', _VersionAction) self.register('action', 'parsers', _SubParsersAction) # raise an exception if the conflict handler is invalid self._get_handler() # action storage self._actions = [] self._option_string_actions = {} # groups self._action_groups = [] self._mutually_exclusive_groups = [] # defaults storage self._defaults = {} # determines whether an "option" looks like a negative number self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$') # whether or not there are any optionals that look like negative # numbers -- uses a list so it can be shared and edited self._has_negative_number_optionals = [] # ==================== # Registration methods # ==================== def register(self, registry_name, value, object): registry = self._registries.setdefault(registry_name, {}) registry[value] = object def _registry_get(self, registry_name, value, default=None): return self._registries[registry_name].get(value, default) # ================================== # Namespace default accessor methods # ================================== def set_defaults(self, **kwargs): self._defaults.update(kwargs) # if these defaults match any existing arguments, replace # the previous default on the object 
with the new one for action in self._actions: if action.dest in kwargs: action.default = kwargs[action.dest] def get_default(self, dest): for action in self._actions: if action.dest == dest and action.default is not None: return action.default return self._defaults.get(dest, None) # ======================= # Adding argument actions # ======================= def add_argument(self, *args, **kwargs): """ add_argument(dest, ..., name=value, ...) add_argument(option_string, option_string, ..., name=value, ...) """ # if no positional args are supplied or only one is supplied and # it doesn't look like an option string, parse a positional # argument chars = self.prefix_chars if not args or len(args) == 1 and args[0][0] not in chars: if args and 'dest' in kwargs: raise ValueError('dest supplied twice for positional argument') kwargs = self._get_positional_kwargs(*args, **kwargs) # otherwise, we're adding an optional argument else: kwargs = self._get_optional_kwargs(*args, **kwargs) # if no default was supplied, use the parser-level default if 'default' not in kwargs: dest = kwargs['dest'] if dest in self._defaults: kwargs['default'] = self._defaults[dest] elif self.argument_default is not None: kwargs['default'] = self.argument_default # create the action object, and add it to the parser action_class = self._pop_action_class(kwargs) if not _callable(action_class): raise ValueError('unknown action "%s"' % action_class) action = action_class(**kwargs) # raise an error if the action type is not callable type_func = self._registry_get('type', action.type, action.type) if not _callable(type_func): raise ValueError('%r is not callable' % type_func) return self._add_action(action) def add_argument_group(self, *args, **kwargs): group = _ArgumentGroup(self, *args, **kwargs) self._action_groups.append(group) return group def add_mutually_exclusive_group(self, **kwargs): group = _MutuallyExclusiveGroup(self, **kwargs) self._mutually_exclusive_groups.append(group) return group def _add_action(self, action): # resolve any conflicts self._check_conflict(action) # add to actions list self._actions.append(action) action.container = self # index the action by any option strings it has for option_string in action.option_strings: self._option_string_actions[option_string] = action # set the flag if any option strings look like negative numbers for option_string in action.option_strings: if self._negative_number_matcher.match(option_string): if not self._has_negative_number_optionals: self._has_negative_number_optionals.append(True) # return the created action return action def _remove_action(self, action): self._actions.remove(action) def _add_container_actions(self, container): # collect groups by titles title_group_map = {} for group in self._action_groups: if group.title in title_group_map: msg = _('cannot merge actions - two groups are named %r') raise ValueError(msg % (group.title)) title_group_map[group.title] = group # map each action to its group group_map = {} for group in container._action_groups: # if a group with the title exists, use that, otherwise # create a new group matching the container's group if group.title not in title_group_map: title_group_map[group.title] = self.add_argument_group( title=group.title, description=group.description, conflict_handler=group.conflict_handler) # map the actions to their new group for action in group._group_actions: group_map[action] = title_group_map[group.title] # add container's mutually exclusive groups # NOTE: if add_mutually_exclusive_group ever gains title= 
and # description= then this code will need to be expanded as above for group in container._mutually_exclusive_groups: mutex_group = self.add_mutually_exclusive_group( required=group.required) # map the actions to their new mutex group for action in group._group_actions: group_map[action] = mutex_group # add all actions to this container or their group for action in container._actions: group_map.get(action, self)._add_action(action) def _get_positional_kwargs(self, dest, **kwargs): # make sure required is not specified if 'required' in kwargs: msg = _("'required' is an invalid argument for positionals") raise TypeError(msg) # mark positional arguments as required if at least one is # always required if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]: kwargs['required'] = True if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs: kwargs['required'] = True # return the keyword arguments with no option strings return dict(kwargs, dest=dest, option_strings=[]) def _get_optional_kwargs(self, *args, **kwargs): # determine short and long option strings option_strings = [] long_option_strings = [] for option_string in args: # error on strings that don't start with an appropriate prefix if not option_string[0] in self.prefix_chars: msg = _('invalid option string %r: ' 'must start with a character %r') tup = option_string, self.prefix_chars raise ValueError(msg % tup) # strings starting with two prefix characters are long options option_strings.append(option_string) if option_string[0] in self.prefix_chars: if len(option_string) > 1: if option_string[1] in self.prefix_chars: long_option_strings.append(option_string) # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x' dest = kwargs.pop('dest', None) if dest is None: if long_option_strings: dest_option_string = long_option_strings[0] else: dest_option_string = option_strings[0] dest = dest_option_string.lstrip(self.prefix_chars) if not dest: msg = _('dest= is required for options like %r') raise ValueError(msg % option_string) dest = dest.replace('-', '_') # return the updated keyword arguments return dict(kwargs, dest=dest, option_strings=option_strings) def _pop_action_class(self, kwargs, default=None): action = kwargs.pop('action', default) return self._registry_get('action', action, action) def _get_handler(self): # determine function from conflict handler string handler_func_name = '_handle_conflict_%s' % self.conflict_handler try: return getattr(self, handler_func_name) except AttributeError: msg = _('invalid conflict_resolution value: %r') raise ValueError(msg % self.conflict_handler) def _check_conflict(self, action): # find all options that conflict with this option confl_optionals = [] for option_string in action.option_strings: if option_string in self._option_string_actions: confl_optional = self._option_string_actions[option_string] confl_optionals.append((option_string, confl_optional)) # resolve any conflicts if confl_optionals: conflict_handler = self._get_handler() conflict_handler(action, confl_optionals) def _handle_conflict_error(self, action, conflicting_actions): message = _('conflicting option string(s): %s') conflict_string = ', '.join([option_string for option_string, action in conflicting_actions]) raise ArgumentError(action, message % conflict_string) def _handle_conflict_resolve(self, action, conflicting_actions): # remove all conflicting options for option_string, action in conflicting_actions: # remove the conflicting option action.option_strings.remove(option_string) 
self._option_string_actions.pop(option_string, None) # if the option now has no option string, remove it from the # container holding it if not action.option_strings: action.container._remove_action(action) class _ArgumentGroup(_ActionsContainer): def __init__(self, container, title=None, description=None, **kwargs): # add any missing keyword arguments by checking the container update = kwargs.setdefault update('conflict_handler', container.conflict_handler) update('prefix_chars', container.prefix_chars) update('argument_default', container.argument_default) super_init = super(_ArgumentGroup, self).__init__ super_init(description=description, **kwargs) # group attributes self.title = title self._group_actions = [] # share most attributes with the container self._registries = container._registries self._actions = container._actions self._option_string_actions = container._option_string_actions self._defaults = container._defaults self._has_negative_number_optionals = \ container._has_negative_number_optionals def _add_action(self, action): action = super(_ArgumentGroup, self)._add_action(action) self._group_actions.append(action) return action def _remove_action(self, action): super(_ArgumentGroup, self)._remove_action(action) self._group_actions.remove(action) class _MutuallyExclusiveGroup(_ArgumentGroup): def __init__(self, container, required=False): super(_MutuallyExclusiveGroup, self).__init__(container) self.required = required self._container = container def _add_action(self, action): if action.required: msg = _('mutually exclusive arguments must be optional') raise ValueError(msg) action = self._container._add_action(action) self._group_actions.append(action) return action def _remove_action(self, action): self._container._remove_action(action) self._group_actions.remove(action) class ArgumentParser(_AttributeHolder, _ActionsContainer): """Object for parsing command line strings into Python objects. Keyword Arguments: - prog -- The name of the program (default: sys.argv[0]) - usage -- A usage message (default: auto-generated from arguments) - description -- A description of what the program does - epilog -- Text following the argument descriptions - parents -- Parsers whose arguments should be copied into this one - formatter_class -- HelpFormatter class for printing help messages - prefix_chars -- Characters that prefix optional arguments - fromfile_prefix_chars -- Characters that prefix files containing additional arguments - argument_default -- The default value for all arguments - conflict_handler -- String indicating how to handle conflicts - add_help -- Add a -h/-help option """ def __init__(self, prog=None, usage=None, description=None, epilog=None, version=None, parents=[], formatter_class=HelpFormatter, prefix_chars='-', fromfile_prefix_chars=None, argument_default=None, conflict_handler='error', add_help=True): if version is not None: import warnings warnings.warn( """The "version" argument to ArgumentParser is deprecated. 
""" """Please use """ """"add_argument(..., action='version', version="N", ...)" """ """instead""", DeprecationWarning) superinit = super(ArgumentParser, self).__init__ superinit(description=description, prefix_chars=prefix_chars, argument_default=argument_default, conflict_handler=conflict_handler) # default setting for prog if prog is None: prog = _os.path.basename(_sys.argv[0]) self.prog = prog self.usage = usage self.epilog = epilog self.version = version self.formatter_class = formatter_class self.fromfile_prefix_chars = fromfile_prefix_chars self.add_help = add_help add_group = self.add_argument_group self._positionals = add_group(_('positional arguments')) self._optionals = add_group(_('optional arguments')) self._subparsers = None # register types def identity(string): return string self.register('type', None, identity) # add help and version arguments if necessary # (using explicit default to override global argument_default) if '-' in prefix_chars: default_prefix = '-' else: default_prefix = prefix_chars[0] if self.add_help: self.add_argument( default_prefix+'h', default_prefix*2+'help', action='help', default=SUPPRESS, help=_('show this help message and exit')) if self.version: self.add_argument( default_prefix+'v', default_prefix*2+'version', action='version', default=SUPPRESS, version=self.version, help=_("show program's version number and exit")) # add parent arguments and defaults for parent in parents: self._add_container_actions(parent) try: defaults = parent._defaults except AttributeError: pass else: self._defaults.update(defaults) # ======================= # Pretty __repr__ methods # ======================= def _get_kwargs(self): names = [ 'prog', 'usage', 'description', 'version', 'formatter_class', 'conflict_handler', 'add_help', ] return [(name, getattr(self, name)) for name in names] # ================================== # Optional/Positional adding methods # ================================== def add_subparsers(self, **kwargs): if self._subparsers is not None: self.error(_('cannot have multiple subparser arguments')) # add the parser class to the arguments if it's not present kwargs.setdefault('parser_class', type(self)) if 'title' in kwargs or 'description' in kwargs: title = _(kwargs.pop('title', 'subcommands')) description = _(kwargs.pop('description', None)) self._subparsers = self.add_argument_group(title, description) else: self._subparsers = self._positionals # prog defaults to the usage message of this parser, skipping # optional arguments and with no "usage:" prefix if kwargs.get('prog') is None: formatter = self._get_formatter() positionals = self._get_positional_actions() groups = self._mutually_exclusive_groups formatter.add_usage(self.usage, positionals, groups, '') kwargs['prog'] = formatter.format_help().strip() # create the parsers action and add it to the positionals list parsers_class = self._pop_action_class(kwargs, 'parsers') action = parsers_class(option_strings=[], **kwargs) self._subparsers._add_action(action) # return the created parsers action return action def _add_action(self, action): if action.option_strings: self._optionals._add_action(action) else: self._positionals._add_action(action) return action def _get_optional_actions(self): return [action for action in self._actions if action.option_strings] def _get_positional_actions(self): return [action for action in self._actions if not action.option_strings] # ===================================== # Command line argument parsing methods # ===================================== def 
parse_args(self, args=None, namespace=None): args, argv = self.parse_known_args(args, namespace) if argv: msg = _('unrecognized arguments: %s') self.error(msg % ' '.join(argv)) return args def parse_known_args(self, args=None, namespace=None): # args default to the system args if args is None: args = _sys.argv[1:] # default Namespace built from parser defaults if namespace is None: namespace = Namespace() # add any action defaults that aren't present for action in self._actions: if action.dest is not SUPPRESS: if not hasattr(namespace, action.dest): if action.default is not SUPPRESS: default = action.default if isinstance(action.default, basestring): default = self._get_value(action, default) setattr(namespace, action.dest, default) # add any parser defaults that aren't present for dest in self._defaults: if not hasattr(namespace, dest): setattr(namespace, dest, self._defaults[dest]) # parse the arguments and exit if there are any errors try: namespace, args = self._parse_known_args(args, namespace) if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR): args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) delattr(namespace, _UNRECOGNIZED_ARGS_ATTR) return namespace, args except ArgumentError: err = _sys.exc_info()[1] self.error(str(err)) def _parse_known_args(self, arg_strings, namespace): # replace arg strings that are file references if self.fromfile_prefix_chars is not None: arg_strings = self._read_args_from_files(arg_strings) # map all mutually exclusive arguments to the other arguments # they can't occur with action_conflicts = {} for mutex_group in self._mutually_exclusive_groups: group_actions = mutex_group._group_actions for i, mutex_action in enumerate(mutex_group._group_actions): conflicts = action_conflicts.setdefault(mutex_action, []) conflicts.extend(group_actions[:i]) conflicts.extend(group_actions[i + 1:]) # find all option indices, and determine the arg_string_pattern # which has an 'O' if there is an option at an index, # an 'A' if there is an argument, or a '-' if there is a '--' option_string_indices = {} arg_string_pattern_parts = [] arg_strings_iter = iter(arg_strings) for i, arg_string in enumerate(arg_strings_iter): # all args after -- are non-options if arg_string == '--': arg_string_pattern_parts.append('-') for arg_string in arg_strings_iter: arg_string_pattern_parts.append('A') # otherwise, add the arg to the arg strings # and note the index if it was an option else: option_tuple = self._parse_optional(arg_string) if option_tuple is None: pattern = 'A' else: option_string_indices[i] = option_tuple pattern = 'O' arg_string_pattern_parts.append(pattern) # join the pieces together to form the pattern arg_strings_pattern = ''.join(arg_string_pattern_parts) # converts arg strings to the appropriate and then takes the action seen_actions = set() seen_non_default_actions = set() def take_action(action, argument_strings, option_string=None): seen_actions.add(action) argument_values = self._get_values(action, argument_strings) # error if this argument is not allowed with other previously # seen arguments, assuming that actions that use the default # value don't really count as "present" if argument_values is not action.default: seen_non_default_actions.add(action) for conflict_action in action_conflicts.get(action, []): if conflict_action in seen_non_default_actions: msg = _('not allowed with argument %s') action_name = _get_action_name(conflict_action) raise ArgumentError(action, msg % action_name) # take the action if we didn't receive a SUPPRESS value # (e.g. 
from a default) if argument_values is not SUPPRESS: action(self, namespace, argument_values, option_string) # function to convert arg_strings into an optional action def consume_optional(start_index): # get the optional identified at this index option_tuple = option_string_indices[start_index] action, option_string, explicit_arg = option_tuple # identify additional optionals in the same arg string # (e.g. -xyz is the same as -x -y -z if no args are required) match_argument = self._match_argument action_tuples = [] while True: # if we found no optional action, skip it if action is None: extras.append(arg_strings[start_index]) return start_index + 1 # if there is an explicit argument, try to match the # optional's string arguments to only this if explicit_arg is not None: arg_count = match_argument(action, 'A') # if the action is a single-dash option and takes no # arguments, try to parse more single-dash options out # of the tail of the option string chars = self.prefix_chars if arg_count == 0 and option_string[1] not in chars: action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] new_explicit_arg = explicit_arg[1:] or None optionals_map = self._option_string_actions if option_string in optionals_map: action = optionals_map[option_string] explicit_arg = new_explicit_arg else: msg = _('ignored explicit argument %r') raise ArgumentError(action, msg % explicit_arg) # if the action expect exactly one argument, we've # successfully matched the option; exit the loop elif arg_count == 1: stop = start_index + 1 args = [explicit_arg] action_tuples.append((action, args, option_string)) break # error if a double-dash option did not use the # explicit argument else: msg = _('ignored explicit argument %r') raise ArgumentError(action, msg % explicit_arg) # if there is no explicit argument, try to match the # optional's string arguments with the following strings # if successful, exit the loop else: start = start_index + 1 selected_patterns = arg_strings_pattern[start:] arg_count = match_argument(action, selected_patterns) stop = start + arg_count args = arg_strings[start:stop] action_tuples.append((action, args, option_string)) break # add the Optional to the list and return the index at which # the Optional's string args stopped assert action_tuples for action, args, option_string in action_tuples: take_action(action, args, option_string) return stop # the list of Positionals left to be parsed; this is modified # by consume_positionals() positionals = self._get_positional_actions() # function to convert arg_strings into positional actions def consume_positionals(start_index): # match as many Positionals as possible match_partial = self._match_arguments_partial selected_pattern = arg_strings_pattern[start_index:] arg_counts = match_partial(positionals, selected_pattern) # slice off the appropriate arg strings for each Positional # and add the Positional and its args to the list for action, arg_count in zip(positionals, arg_counts): args = arg_strings[start_index: start_index + arg_count] start_index += arg_count take_action(action, args) # slice off the Positionals that we just parsed and return the # index at which the Positionals' string args stopped positionals[:] = positionals[len(arg_counts):] return start_index # consume Positionals and Optionals alternately, until we have # passed the last option string extras = [] start_index = 0 if option_string_indices: max_option_string_index = max(option_string_indices) else: max_option_string_index = 
-1 while start_index <= max_option_string_index: # consume any Positionals preceding the next option next_option_string_index = min([ index for index in option_string_indices if index >= start_index]) if start_index != next_option_string_index: positionals_end_index = consume_positionals(start_index) # only try to parse the next optional if we didn't consume # the option string during the positionals parsing if positionals_end_index > start_index: start_index = positionals_end_index continue else: start_index = positionals_end_index # if we consumed all the positionals we could and we're not # at the index of an option string, there were extra arguments if start_index not in option_string_indices: strings = arg_strings[start_index:next_option_string_index] extras.extend(strings) start_index = next_option_string_index # consume the next optional and any arguments for it start_index = consume_optional(start_index) # consume any positionals following the last Optional stop_index = consume_positionals(start_index) # if we didn't consume all the argument strings, there were extras extras.extend(arg_strings[stop_index:]) # if we didn't use all the Positional objects, there were too few # arg strings supplied. if positionals: self.error(_('too few arguments')) # make sure all required actions were present for action in self._actions: if action.required: if action not in seen_actions: name = _get_action_name(action) self.error(_('argument %s is required') % name) # make sure all required groups had one option present for group in self._mutually_exclusive_groups: if group.required: for action in group._group_actions: if action in seen_non_default_actions: break # if no actions were used, report the error else: names = [_get_action_name(action) for action in group._group_actions if action.help is not SUPPRESS] msg = _('one of the arguments %s is required') self.error(msg % ' '.join(names)) # return the updated namespace and the extra arguments return namespace, extras def _read_args_from_files(self, arg_strings): # expand arguments referencing files new_arg_strings = [] for arg_string in arg_strings: # for regular arguments, just add them back into the list if arg_string[0] not in self.fromfile_prefix_chars: new_arg_strings.append(arg_string) # replace arguments referencing files with the file content else: try: args_file = open(arg_string[1:]) try: arg_strings = [] for arg_line in args_file.read().splitlines(): for arg in self.convert_arg_line_to_args(arg_line): arg_strings.append(arg) arg_strings = self._read_args_from_files(arg_strings) new_arg_strings.extend(arg_strings) finally: args_file.close() except IOError: err = _sys.exc_info()[1] self.error(str(err)) # return the modified argument list return new_arg_strings def convert_arg_line_to_args(self, arg_line): return [arg_line] def _match_argument(self, action, arg_strings_pattern): # match the pattern for this action to the arg strings nargs_pattern = self._get_nargs_pattern(action) match = _re.match(nargs_pattern, arg_strings_pattern) # raise an exception if we weren't able to find a match if match is None: nargs_errors = { None: _('expected one argument'), OPTIONAL: _('expected at most one argument'), ONE_OR_MORE: _('expected at least one argument'), } default = _('expected %s argument(s)') % action.nargs msg = nargs_errors.get(action.nargs, default) raise ArgumentError(action, msg) # return the number of arguments matched return len(match.group(1)) def _match_arguments_partial(self, actions, arg_strings_pattern): # progressively shorten the 
actions list by slicing off the # final actions until we find a match result = [] for i in range(len(actions), 0, -1): actions_slice = actions[:i] pattern = ''.join([self._get_nargs_pattern(action) for action in actions_slice]) match = _re.match(pattern, arg_strings_pattern) if match is not None: result.extend([len(string) for string in match.groups()]) break # return the list of arg string counts return result def _parse_optional(self, arg_string): # if it's an empty string, it was meant to be a positional if not arg_string: return None # if it doesn't start with a prefix, it was meant to be positional if not arg_string[0] in self.prefix_chars: return None # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] return action, arg_string, None # if it's just a single character, it was meant to be positional if len(arg_string) == 1: return None # if the option string before the "=" is present, return the action if '=' in arg_string: option_string, explicit_arg = arg_string.split('=', 1) if option_string in self._option_string_actions: action = self._option_string_actions[option_string] return action, option_string, explicit_arg # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations option_tuples = self._get_option_tuples(arg_string) # if multiple actions match, the option string was ambiguous if len(option_tuples) > 1: options = ', '.join([option_string for action, option_string, explicit_arg in option_tuples]) tup = arg_string, options self.error(_('ambiguous option: %s could match %s') % tup) # if exactly one action matched, this segmentation is good, # so return the parsed action elif len(option_tuples) == 1: option_tuple, = option_tuples return option_tuple # if it was not found as an option, but it looks like a negative # number, it was meant to be positional # unless there are negative-number-like options if self._negative_number_matcher.match(arg_string): if not self._has_negative_number_optionals: return None # if it contains a space, it was meant to be a positional if ' ' in arg_string: return None # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) return None, arg_string, None def _get_option_tuples(self, option_string): result = [] # option strings starting with two prefix characters are only # split at the '=' chars = self.prefix_chars if option_string[0] in chars and option_string[1] in chars: if '=' in option_string: option_prefix, explicit_arg = option_string.split('=', 1) else: option_prefix = option_string explicit_arg = None for option_string in self._option_string_actions: if option_string.startswith(option_prefix): action = self._option_string_actions[option_string] tup = action, option_string, explicit_arg result.append(tup) # single character options can be concatenated with their arguments # but multiple character options always have to have their argument # separate elif option_string[0] in chars and option_string[1] not in chars: option_prefix = option_string explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] for option_string in self._option_string_actions: if option_string == short_option_prefix: action = self._option_string_actions[option_string] tup = action, option_string, short_explicit_arg result.append(tup) elif option_string.startswith(option_prefix): action = 
self._option_string_actions[option_string] tup = action, option_string, explicit_arg result.append(tup) # shouldn't ever get here else: self.error(_('unexpected option string: %s') % option_string) # return the collected option tuples return result def _get_nargs_pattern(self, action): # in all examples below, we have to allow for '--' args # which are represented as '-' in the pattern nargs = action.nargs # the default (None) is assumed to be a single argument if nargs is None: nargs_pattern = '(-*A-*)' # allow zero or one arguments elif nargs == OPTIONAL: nargs_pattern = '(-*A?-*)' # allow zero or more arguments elif nargs == ZERO_OR_MORE: nargs_pattern = '(-*[A-]*)' # allow one or more arguments elif nargs == ONE_OR_MORE: nargs_pattern = '(-*A[A-]*)' # allow any number of options or arguments elif nargs == REMAINDER: nargs_pattern = '([-AO]*)' # allow one argument followed by any number of options or arguments elif nargs == PARSER: nargs_pattern = '(-*A[-AO]*)' # all others should be integers else: nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) # if this is an optional action, -- is not allowed if action.option_strings: nargs_pattern = nargs_pattern.replace('-*', '') nargs_pattern = nargs_pattern.replace('-', '') # return the pattern return nargs_pattern # ======================== # Value conversion methods # ======================== def _get_values(self, action, arg_strings): # for everything but PARSER args, strip out '--' if action.nargs not in [PARSER, REMAINDER]: arg_strings = [s for s in arg_strings if s != '--'] # optional argument produces a default when not present if not arg_strings and action.nargs == OPTIONAL: if action.option_strings: value = action.const else: value = action.default if isinstance(value, basestring): value = self._get_value(action, value) self._check_value(action, value) # when nargs='*' on a positional, if there were no command-line # args, use the default if it is anything other than None elif (not arg_strings and action.nargs == ZERO_OR_MORE and not action.option_strings): if action.default is not None: value = action.default else: value = arg_strings self._check_value(action, value) # single argument or optional argument produces a single value elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: arg_string, = arg_strings value = self._get_value(action, arg_string) self._check_value(action, value) # REMAINDER arguments convert all values, checking none elif action.nargs == REMAINDER: value = [self._get_value(action, v) for v in arg_strings] # PARSER arguments convert all values, but check only the first elif action.nargs == PARSER: value = [self._get_value(action, v) for v in arg_strings] self._check_value(action, value[0]) # all other types of nargs produce a list else: value = [self._get_value(action, v) for v in arg_strings] for v in value: self._check_value(action, v) # return the converted value return value def _get_value(self, action, arg_string): type_func = self._registry_get('type', action.type, action.type) if not _callable(type_func): msg = _('%r is not callable') raise ArgumentError(action, msg % type_func) # convert the value to the appropriate type try: result = type_func(arg_string) # ArgumentTypeErrors indicate errors except ArgumentTypeError: name = getattr(action.type, '__name__', repr(action.type)) msg = str(_sys.exc_info()[1]) raise ArgumentError(action, msg) # TypeErrors or ValueErrors also indicate errors except (TypeError, ValueError): name = getattr(action.type, '__name__', repr(action.type)) msg = _('invalid %s 
value: %r') raise ArgumentError(action, msg % (name, arg_string)) # return the converted value return result def _check_value(self, action, value): # converted value must be one of the choices (if specified) if action.choices is not None and value not in action.choices: tup = value, ', '.join(map(repr, action.choices)) msg = _('invalid choice: %r (choose from %s)') % tup raise ArgumentError(action, msg) # ======================= # Help-formatting methods # ======================= def format_usage(self): formatter = self._get_formatter() formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) return formatter.format_help() def format_help(self): formatter = self._get_formatter() # usage formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) # description formatter.add_text(self.description) # positionals, optionals and user-defined groups for action_group in self._action_groups: formatter.start_section(action_group.title) formatter.add_text(action_group.description) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(self.epilog) # determine help from format above return formatter.format_help() def format_version(self): import warnings warnings.warn( 'The format_version method is deprecated -- the "version" ' 'argument to ArgumentParser is no longer supported.', DeprecationWarning) formatter = self._get_formatter() formatter.add_text(self.version) return formatter.format_help() def _get_formatter(self): return self.formatter_class(prog=self.prog) # ===================== # Help-printing methods # ===================== def print_usage(self, file=None): if file is None: file = _sys.stdout self._print_message(self.format_usage(), file) def print_help(self, file=None): if file is None: file = _sys.stdout self._print_message(self.format_help(), file) def print_version(self, file=None): import warnings warnings.warn( 'The print_version method is deprecated -- the "version" ' 'argument to ArgumentParser is no longer supported.', DeprecationWarning) self._print_message(self.format_version(), file) def _print_message(self, message, file=None): if message: if file is None: file = _sys.stderr file.write(message) # =============== # Exiting methods # =============== def exit(self, status=0, message=None): if message: self._print_message(message, _sys.stderr) _sys.exit(status) def error(self, message): """error(message: string) Prints a usage message incorporating the message to stderr and exits. If you override this in a subclass, it should not return -- it should either exit or raise an exception. """ self.print_usage(_sys.stderr) self.exit(2, _('%s: error: %s\n') % (self.prog, message))
gpl-2.0
8,972,317,420,490,129,000
36.168078
80
0.558987
false
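The argparse source in the row above implements the HelpFormatter hierarchy, the Action classes and the ArgumentParser parsing loop; the following is a small illustrative sketch of that public API from user code (the program name, options and sample argv are invented for the example and are not part of the dataset row).

import argparse

# Parser exercising pieces defined above: a formatter subclass, store/count
# actions, nargs handling and a mutually exclusive group.
parser = argparse.ArgumentParser(
    prog='demo',
    description='Illustrative use of the argparse API shown above.',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)

parser.add_argument('inputs', nargs='+', help='one or more input files')
parser.add_argument('-o', '--output', default='out.txt',
                    help='file to write results to')
parser.add_argument('-v', '--verbose', action='count', default=0,
                    help='increase verbosity (repeatable)')

group = parser.add_mutually_exclusive_group()
group.add_argument('--fast', action='store_true', help='prefer speed')
group.add_argument('--exact', action='store_true', help='prefer accuracy')

args = parser.parse_args(['a.txt', 'b.txt', '-vv', '--fast'])
print(args.inputs, args.output, args.verbose, args.fast)
# -> ['a.txt', 'b.txt'] out.txt 2 True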
aptrishu/coala-bears
tests/haskell/HaskellLintBearTest.py
8
1995
from queue import Queue

from bears.haskell.HaskellLintBear import HaskellLintBear
from coalib.testing.LocalBearTestHelper import LocalBearTestHelper
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.settings.Section import Section

good_single_line_file = """
myconcat = (++)
""".splitlines()


bad_single_line_file = """
myconcat a b = ((++) a b)
""".splitlines()


good_multiple_line_file = """
import qualified Data.ByteString.Char8 as BS

main :: IO()
main = return $ BS.concat
    [ BS.pack "I am being tested by hlint!"
    , "String dummy"
    , "Another String dummy"
    ]
""".splitlines()


bad_multiple_line_file = """
import qualified Data.ByteString.Char8 as BS

main :: IO()
main = return $ BS.concat $
    [ BS.pack $ "I am being tested by hlint!"
    , "String dummy"
    , "Another String dummy"
    ]
""".splitlines()


@generate_skip_decorator(HaskellLintBear)
class HaskellLintBearTest(LocalBearTestHelper):

    def setUp(self):
        self.section = Section('name')
        self.uut = HaskellLintBear(self.section, Queue())

    def test_valid(self):
        self.check_validity(self.uut,
                            good_single_line_file,
                            tempfile_kwargs={'suffix': '.hs'})
        self.check_validity(self.uut,
                            good_multiple_line_file,
                            tempfile_kwargs={'suffix': '.hs'})

    def test_invalid(self):
        results = self.check_invalidity(self.uut,
                                        bad_single_line_file,
                                        tempfile_kwargs={'suffix': '.hs'})
        self.assertEqual(len(results), 1, str(results))
        self.assertIn('Redundant bracket', results[0].message)

        results = self.check_invalidity(self.uut,
                                        bad_multiple_line_file,
                                        tempfile_kwargs={'suffix': '.hs'})
        self.assertEqual(len(results), 2, str(results))
        self.assertIn('Redundant $', results[0].message)
agpl-3.0
-6,586,619,287,682,781,000
28.338235
74
0.607018
false
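For reference, the two "bad" fixtures in the test above trigger hlint's "Redundant bracket" and "Redundant $" diagnostics; the following sketch (not part of the original test file) shows what those fixtures look like once hlint's suggestions are applied.

# hlint's "Redundant bracket": the outer parentheses around ((++) a b)
# add nothing.
fixed_single_line_file = """
myconcat a b = (++) a b
""".splitlines()

# hlint's "Redundant $": neither `BS.concat $ [...]` nor `BS.pack $ "..."`
# needs the operator, so the fixed fixture matches good_multiple_line_file.
fixed_multiple_line_file = """
import qualified Data.ByteString.Char8 as BS

main :: IO()
main = return $ BS.concat
    [ BS.pack "I am being tested by hlint!"
    , "String dummy"
    , "Another String dummy"
    ]
""".splitlines()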
salivatears/ansible
lib/ansible/plugins/cache/base.py
124
1479
# (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from abc import ABCMeta, abstractmethod

from six import with_metaclass

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class BaseCacheModule(with_metaclass(ABCMeta, object)):

    display = display

    @abstractmethod
    def get(self, key):
        pass

    @abstractmethod
    def set(self, key, value):
        pass

    @abstractmethod
    def keys(self):
        pass

    @abstractmethod
    def contains(self, key):
        pass

    @abstractmethod
    def delete(self, key):
        pass

    @abstractmethod
    def flush(self):
        pass

    @abstractmethod
    def copy(self):
        pass
gpl-3.0
5,494,726,859,176,623,000
22.47619
70
0.69236
false
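The BaseCacheModule in the row above only defines an abstract interface; a minimal in-memory sketch of a concrete plugin is given below. It is illustrative only -- real Ansible cache plugins (jsonfile, memcached, and so on) add expiry and persistence, and the plugin loader has its own naming conventions that this sketch does not claim to satisfy.

from ansible.plugins.cache.base import BaseCacheModule


class MemoryCacheModule(BaseCacheModule):
    """Dict-backed cache implementing every abstract method above."""

    def __init__(self, *args, **kwargs):
        self._cache = {}

    def get(self, key):
        return self._cache[key]

    def set(self, key, value):
        self._cache[key] = value

    def keys(self):
        return list(self._cache.keys())

    def contains(self, key):
        return key in self._cache

    def delete(self, key):
        del self._cache[key]

    def flush(self):
        self._cache = {}

    def copy(self):
        return self._cache.copy()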
alxgu/ansible
lib/ansible/modules/identity/keycloak/keycloak_client.py
27
32548
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2017, Eike Frost <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: keycloak_client short_description: Allows administration of Keycloak clients via Keycloak API version_added: "2.5" description: - This module allows the administration of Keycloak clients via the Keycloak REST API. It requires access to the REST API via OpenID Connect; the user connecting and the client being used must have the requisite access rights. In a default Keycloak installation, admin-cli and an admin user would work, as would a separate client definition with the scope tailored to your needs and a user having the expected roles. - The names of module options are snake_cased versions of the camelCase ones found in the Keycloak API and its documentation at U(http://www.keycloak.org/docs-api/3.3/rest-api/). Aliases are provided so camelCased versions can be used as well. - The Keycloak API does not always sanity check inputs e.g. you can set SAML-specific settings on an OpenID Connect client for instance and vice versa. Be careful. If you do not specify a setting, usually a sensible default is chosen. options: state: description: - State of the client - On C(present), the client will be created (or updated if it exists already). - On C(absent), the client will be removed if it exists choices: ['present', 'absent'] default: 'present' realm: description: - The realm to create the client in. client_id: description: - Client id of client to be worked on. This is usually an alphanumeric name chosen by you. Either this or I(id) is required. If you specify both, I(id) takes precedence. This is 'clientId' in the Keycloak REST API. aliases: - clientId id: description: - Id of client to be worked on. This is usually an UUID. Either this or I(client_id) is required. If you specify both, this takes precedence. name: description: - Name of the client (this is not the same as I(client_id)) description: description: - Description of the client in Keycloak root_url: description: - Root URL appended to relative URLs for this client This is 'rootUrl' in the Keycloak REST API. aliases: - rootUrl admin_url: description: - URL to the admin interface of the client This is 'adminUrl' in the Keycloak REST API. aliases: - adminUrl base_url: description: - Default URL to use when the auth server needs to redirect or link back to the client This is 'baseUrl' in the Keycloak REST API. aliases: - baseUrl enabled: description: - Is this client enabled or not? type: bool client_authenticator_type: description: - How do clients authenticate with the auth server? Either C(client-secret) or C(client-jwt) can be chosen. When using C(client-secret), the module parameter I(secret) can set it, while for C(client-jwt), you can use the keys C(use.jwks.url), C(jwks.url), and C(jwt.credential.certificate) in the I(attributes) module parameter to configure its behavior. This is 'clientAuthenticatorType' in the Keycloak REST API. choices: ['client-secret', 'client-jwt'] aliases: - clientAuthenticatorType secret: description: - When using I(client_authenticator_type) C(client-secret) (the default), you can specify a secret here (otherwise one will be generated if it does not exit). 
If changing this secret, the module will not register a change currently (but the changed secret will be saved). registration_access_token: description: - The registration access token provides access for clients to the client registration service. This is 'registrationAccessToken' in the Keycloak REST API. aliases: - registrationAccessToken default_roles: description: - list of default roles for this client. If the client roles referenced do not exist yet, they will be created. This is 'defaultRoles' in the Keycloak REST API. aliases: - defaultRoles redirect_uris: description: - Acceptable redirect URIs for this client. This is 'redirectUris' in the Keycloak REST API. aliases: - redirectUris web_origins: description: - List of allowed CORS origins. This is 'webOrigins' in the Keycloak REST API. aliases: - webOrigins not_before: description: - Revoke any tokens issued before this date for this client (this is a UNIX timestamp). This is 'notBefore' in the Keycloak REST API. aliases: - notBefore bearer_only: description: - The access type of this client is bearer-only. This is 'bearerOnly' in the Keycloak REST API. aliases: - bearerOnly type: bool consent_required: description: - If enabled, users have to consent to client access. This is 'consentRequired' in the Keycloak REST API. aliases: - consentRequired type: bool standard_flow_enabled: description: - Enable standard flow for this client or not (OpenID connect). This is 'standardFlowEnabled' in the Keycloak REST API. aliases: - standardFlowEnabled type: bool implicit_flow_enabled: description: - Enable implicit flow for this client or not (OpenID connect). This is 'implicitFlowEnabled' in the Keycloak REST API. aliases: - implicitFlowEnabled type: bool direct_access_grants_enabled: description: - Are direct access grants enabled for this client or not (OpenID connect). This is 'directAccessGrantsEnabled' in the Keycloak REST API. aliases: - directAccessGrantsEnabled type: bool service_accounts_enabled: description: - Are service accounts enabled for this client or not (OpenID connect). This is 'serviceAccountsEnabled' in the Keycloak REST API. aliases: - serviceAccountsEnabled type: bool authorization_services_enabled: description: - Are authorization services enabled for this client or not (OpenID connect). This is 'authorizationServicesEnabled' in the Keycloak REST API. aliases: - authorizationServicesEnabled type: bool public_client: description: - Is the access type for this client public or not. This is 'publicClient' in the Keycloak REST API. aliases: - publicClient type: bool frontchannel_logout: description: - Is frontchannel logout enabled for this client or not. This is 'frontchannelLogout' in the Keycloak REST API. aliases: - frontchannelLogout type: bool protocol: description: - Type of client (either C(openid-connect) or C(saml). choices: ['openid-connect', 'saml'] full_scope_allowed: description: - Is the "Full Scope Allowed" feature set for this client or not. This is 'fullScopeAllowed' in the Keycloak REST API. aliases: - fullScopeAllowed type: bool node_re_registration_timeout: description: - Cluster node re-registration timeout for this client. This is 'nodeReRegistrationTimeout' in the Keycloak REST API. aliases: - nodeReRegistrationTimeout registered_nodes: description: - dict of registered cluster nodes (with C(nodename) as the key and last registration time as the value). This is 'registeredNodes' in the Keycloak REST API. 
aliases: - registeredNodes client_template: description: - Client template to use for this client. If it does not exist this field will silently be dropped. This is 'clientTemplate' in the Keycloak REST API. aliases: - clientTemplate use_template_config: description: - Whether or not to use configuration from the I(client_template). This is 'useTemplateConfig' in the Keycloak REST API. aliases: - useTemplateConfig type: bool use_template_scope: description: - Whether or not to use scope configuration from the I(client_template). This is 'useTemplateScope' in the Keycloak REST API. aliases: - useTemplateScope type: bool use_template_mappers: description: - Whether or not to use mapper configuration from the I(client_template). This is 'useTemplateMappers' in the Keycloak REST API. aliases: - useTemplateMappers type: bool surrogate_auth_required: description: - Whether or not surrogate auth is required. This is 'surrogateAuthRequired' in the Keycloak REST API. aliases: - surrogateAuthRequired type: bool authorization_settings: description: - a data structure defining the authorization settings for this client. For reference, please see the Keycloak API docs at U(http://www.keycloak.org/docs-api/3.3/rest-api/index.html#_resourceserverrepresentation). This is 'authorizationSettings' in the Keycloak REST API. aliases: - authorizationSettings protocol_mappers: description: - a list of dicts defining protocol mappers for this client. This is 'protocolMappers' in the Keycloak REST API. aliases: - protocolMappers suboptions: consentRequired: description: - Specifies whether a user needs to provide consent to a client for this mapper to be active. consentText: description: - The human-readable name of the consent the user is presented to accept. id: description: - Usually a UUID specifying the internal ID of this protocol mapper instance. name: description: - The name of this protocol mapper. protocol: description: - This is either C(openid-connect) or C(saml), this specifies for which protocol this protocol mapper is active. choices: ['openid-connect', 'saml'] protocolMapper: description: - The Keycloak-internal name of the type of this protocol-mapper. While an exhaustive list is impossible to provide since this may be extended through SPIs by the user of Keycloak, by default Keycloak as of 3.4 ships with at least - C(docker-v2-allow-all-mapper) - C(oidc-address-mapper) - C(oidc-full-name-mapper) - C(oidc-group-membership-mapper) - C(oidc-hardcoded-claim-mapper) - C(oidc-hardcoded-role-mapper) - C(oidc-role-name-mapper) - C(oidc-script-based-protocol-mapper) - C(oidc-sha256-pairwise-sub-mapper) - C(oidc-usermodel-attribute-mapper) - C(oidc-usermodel-client-role-mapper) - C(oidc-usermodel-property-mapper) - C(oidc-usermodel-realm-role-mapper) - C(oidc-usersessionmodel-note-mapper) - C(saml-group-membership-mapper) - C(saml-hardcode-attribute-mapper) - C(saml-hardcode-role-mapper) - C(saml-role-list-mapper) - C(saml-role-name-mapper) - C(saml-user-attribute-mapper) - C(saml-user-property-mapper) - C(saml-user-session-note-mapper) - An exhaustive list of available mappers on your installation can be obtained on the admin console by going to Server Info -> Providers and looking under 'protocol-mapper'. config: description: - Dict specifying the configuration options for the protocol mapper; the contents differ depending on the value of I(protocolMapper) and are not documented other than by the source of the mappers and its parent class(es). An example is given below. 
It is easiest to obtain valid config values by dumping an already-existing protocol mapper configuration through check-mode in the I(existing) field. attributes: description: - A dict of further attributes for this client. This can contain various configuration settings; an example is given in the examples section. While an exhaustive list of permissible options is not available; possible options as of Keycloak 3.4 are listed below. The Keycloak API does not validate whether a given option is appropriate for the protocol used; if specified anyway, Keycloak will simply not use it. suboptions: saml.authnstatement: description: - For SAML clients, boolean specifying whether or not a statement containing method and timestamp should be included in the login response. saml.client.signature: description: - For SAML clients, boolean specifying whether a client signature is required and validated. saml.encrypt: description: - Boolean specifying whether SAML assertions should be encrypted with the client's public key. saml.force.post.binding: description: - For SAML clients, boolean specifying whether always to use POST binding for responses. saml.onetimeuse.condition: description: - For SAML clients, boolean specifying whether a OneTimeUse condition should be included in login responses. saml.server.signature: description: - Boolean specifying whether SAML documents should be signed by the realm. saml.server.signature.keyinfo.ext: description: - For SAML clients, boolean specifying whether REDIRECT signing key lookup should be optimized through inclusion of the signing key id in the SAML Extensions element. saml.signature.algorithm: description: - Signature algorithm used to sign SAML documents. One of C(RSA_SHA256), C(RSA_SHA1), C(RSA_SHA512), or C(DSA_SHA1). saml.signing.certificate: description: - SAML signing key certificate, base64-encoded. saml.signing.private.key: description: - SAML signing key private key, base64-encoded. saml_assertion_consumer_url_post: description: - SAML POST Binding URL for the client's assertion consumer service (login responses). saml_assertion_consumer_url_redirect: description: - SAML Redirect Binding URL for the client's assertion consumer service (login responses). saml_force_name_id_format: description: - For SAML clients, Boolean specifying whether to ignore requested NameID subject format and using the configured one instead. saml_name_id_format: description: - For SAML clients, the NameID format to use (one of C(username), C(email), C(transient), or C(persistent)) saml_signature_canonicalization_method: description: - SAML signature canonicalization method. This is one of four values, namely C(http://www.w3.org/2001/10/xml-exc-c14n#) for EXCLUSIVE, C(http://www.w3.org/2001/10/xml-exc-c14n#WithComments) for EXCLUSIVE_WITH_COMMENTS, C(http://www.w3.org/TR/2001/REC-xml-c14n-20010315) for INCLUSIVE, and C(http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments) for INCLUSIVE_WITH_COMMENTS. saml_single_logout_service_url_post: description: - SAML POST binding url for the client's single logout service. saml_single_logout_service_url_redirect: description: - SAML redirect binding url for the client's single logout service. user.info.response.signature.alg: description: - For OpenID-Connect clients, JWA algorithm for signed UserInfo-endpoint responses. One of C(RS256) or C(unsigned). request.object.signature.alg: description: - For OpenID-Connect clients, JWA algorithm which the client needs to use when sending OIDC request object. 
One of C(any), C(none), C(RS256). use.jwks.url: description: - For OpenID-Connect clients, boolean specifying whether to use a JWKS URL to obtain client public keys. jwks.url: description: - For OpenID-Connect clients, URL where client keys in JWK are stored. jwt.credential.certificate: description: - For OpenID-Connect clients, client certificate for validating JWT issued by client and signed by its key, base64-encoded. extends_documentation_fragment: - keycloak author: - Eike Frost (@eikef) ''' EXAMPLES = ''' - name: Create or update Keycloak client (minimal example) local_action: module: keycloak_client auth_client_id: admin-cli auth_keycloak_url: https://auth.example.com/auth auth_realm: master auth_username: USERNAME auth_password: PASSWORD client_id: test state: present - name: Delete a Keycloak client local_action: module: keycloak_client auth_client_id: admin-cli auth_keycloak_url: https://auth.example.com/auth auth_realm: master auth_username: USERNAME auth_password: PASSWORD client_id: test state: absent - name: Create or update a Keycloak client (with all the bells and whistles) local_action: module: keycloak_client auth_client_id: admin-cli auth_keycloak_url: https://auth.example.com/auth auth_realm: master auth_username: USERNAME auth_password: PASSWORD state: present realm: master client_id: test id: d8b127a3-31f6-44c8-a7e4-4ab9a3e78d95 name: this_is_a_test description: Description of this wonderful client root_url: https://www.example.com/ admin_url: https://www.example.com/admin_url base_url: basepath enabled: True client_authenticator_type: client-secret secret: REALLYWELLKEPTSECRET redirect_uris: - https://www.example.com/* - http://localhost:8888/ web_origins: - https://www.example.com/* not_before: 1507825725 bearer_only: False consent_required: False standard_flow_enabled: True implicit_flow_enabled: False direct_access_grants_enabled: False service_accounts_enabled: False authorization_services_enabled: False public_client: False frontchannel_logout: False protocol: openid-connect full_scope_allowed: false node_re_registration_timeout: -1 client_template: test use_template_config: False use_template_scope: false use_template_mappers: no registered_nodes: node01.example.com: 1507828202 registration_access_token: eyJWT_TOKEN surrogate_auth_required: false default_roles: - test01 - test02 protocol_mappers: - config: access.token.claim: True claim.name: "family_name" id.token.claim: True jsonType.label: String user.attribute: lastName userinfo.token.claim: True consentRequired: True consentText: "${familyName}" name: family name protocol: openid-connect protocolMapper: oidc-usermodel-property-mapper - config: attribute.name: Role attribute.nameformat: Basic single: false consentRequired: false name: role list protocol: saml protocolMapper: saml-role-list-mapper attributes: saml.authnstatement: True saml.client.signature: True saml.force.post.binding: True saml.server.signature: True saml.signature.algorithm: RSA_SHA256 saml.signing.certificate: CERTIFICATEHERE saml.signing.private.key: PRIVATEKEYHERE saml_force_name_id_format: False saml_name_id_format: username saml_signature_canonicalization_method: "http://www.w3.org/2001/10/xml-exc-c14n#" user.info.response.signature.alg: RS256 request.object.signature.alg: RS256 use.jwks.url: true jwks.url: JWKS_URL_FOR_CLIENT_AUTH_JWT jwt.credential.certificate: JWT_CREDENTIAL_CERTIFICATE_FOR_CLIENT_AUTH ''' RETURN = ''' msg: description: Message as to what action was taken returned: always type: str sample: "Client testclient has 
been updated" proposed: description: client representation of proposed changes to client returned: always type: dict sample: { clientId: "test" } existing: description: client representation of existing client (sample is truncated) returned: always type: dict sample: { "adminUrl": "http://www.example.com/admin_url", "attributes": { "request.object.signature.alg": "RS256", } } end_state: description: client representation of client after module execution (sample is truncated) returned: always type: dict sample: { "adminUrl": "http://www.example.com/admin_url", "attributes": { "request.object.signature.alg": "RS256", } } ''' from ansible.module_utils.keycloak import KeycloakAPI, camel, keycloak_argument_spec from ansible.module_utils.basic import AnsibleModule def sanitize_cr(clientrep): """ Removes probably sensitive details from a client representation :param clientrep: the clientrep dict to be sanitized :return: sanitized clientrep dict """ result = clientrep.copy() if 'secret' in result: result['secret'] = 'no_log' if 'attributes' in result: if 'saml.signing.private.key' in result['attributes']: result['attributes']['saml.signing.private.key'] = 'no_log' return result def main(): """ Module execution :return: """ argument_spec = keycloak_argument_spec() protmapper_spec = dict( consentRequired=dict(type='bool'), consentText=dict(type='str'), id=dict(type='str'), name=dict(type='str'), protocol=dict(type='str', choices=['openid-connect', 'saml']), protocolMapper=dict(type='str'), config=dict(type='dict'), ) meta_args = dict( state=dict(default='present', choices=['present', 'absent']), realm=dict(type='str', default='master'), id=dict(type='str'), client_id=dict(type='str', aliases=['clientId']), name=dict(type='str'), description=dict(type='str'), root_url=dict(type='str', aliases=['rootUrl']), admin_url=dict(type='str', aliases=['adminUrl']), base_url=dict(type='str', aliases=['baseUrl']), surrogate_auth_required=dict(type='bool', aliases=['surrogateAuthRequired']), enabled=dict(type='bool'), client_authenticator_type=dict(type='str', choices=['client-secret', 'client-jwt'], aliases=['clientAuthenticatorType']), secret=dict(type='str', no_log=True), registration_access_token=dict(type='str', aliases=['registrationAccessToken']), default_roles=dict(type='list', aliases=['defaultRoles']), redirect_uris=dict(type='list', aliases=['redirectUris']), web_origins=dict(type='list', aliases=['webOrigins']), not_before=dict(type='int', aliases=['notBefore']), bearer_only=dict(type='bool', aliases=['bearerOnly']), consent_required=dict(type='bool', aliases=['consentRequired']), standard_flow_enabled=dict(type='bool', aliases=['standardFlowEnabled']), implicit_flow_enabled=dict(type='bool', aliases=['implicitFlowEnabled']), direct_access_grants_enabled=dict(type='bool', aliases=['directAccessGrantsEnabled']), service_accounts_enabled=dict(type='bool', aliases=['serviceAccountsEnabled']), authorization_services_enabled=dict(type='bool', aliases=['authorizationServicesEnabled']), public_client=dict(type='bool', aliases=['publicClient']), frontchannel_logout=dict(type='bool', aliases=['frontchannelLogout']), protocol=dict(type='str', choices=['openid-connect', 'saml']), attributes=dict(type='dict'), full_scope_allowed=dict(type='bool', aliases=['fullScopeAllowed']), node_re_registration_timeout=dict(type='int', aliases=['nodeReRegistrationTimeout']), registered_nodes=dict(type='dict', aliases=['registeredNodes']), client_template=dict(type='str', aliases=['clientTemplate']), 
use_template_config=dict(type='bool', aliases=['useTemplateConfig']), use_template_scope=dict(type='bool', aliases=['useTemplateScope']), use_template_mappers=dict(type='bool', aliases=['useTemplateMappers']), protocol_mappers=dict(type='list', elements='dict', options=protmapper_spec, aliases=['protocolMappers']), authorization_settings=dict(type='dict', aliases=['authorizationSettings']), ) argument_spec.update(meta_args) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['client_id', 'id']])) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) # Obtain access token, initialize API kc = KeycloakAPI(module) realm = module.params.get('realm') cid = module.params.get('id') state = module.params.get('state') # convert module parameters to client representation parameters (if they belong in there) client_params = [x for x in module.params if x not in list(keycloak_argument_spec().keys()) + ['state', 'realm'] and module.params.get(x) is not None] keycloak_argument_spec().keys() # See whether the client already exists in Keycloak if cid is None: before_client = kc.get_client_by_clientid(module.params.get('client_id'), realm=realm) if before_client is not None: cid = before_client['id'] else: before_client = kc.get_client_by_id(cid, realm=realm) if before_client is None: before_client = dict() # Build a proposed changeset from parameters given to this module changeset = dict() for client_param in client_params: new_param_value = module.params.get(client_param) # some lists in the Keycloak API are sorted, some are not. if isinstance(new_param_value, list): if client_param in ['attributes']: try: new_param_value = sorted(new_param_value) except TypeError: pass # Unfortunately, the ansible argument spec checker introduces variables with null values when # they are not specified if client_param == 'protocol_mappers': new_param_value = [dict((k, v) for k, v in x.items() if x[k] is not None) for x in new_param_value] changeset[camel(client_param)] = new_param_value # Whether creating or updating a client, take the before-state and merge the changeset into it updated_client = before_client.copy() updated_client.update(changeset) result['proposed'] = sanitize_cr(changeset) result['existing'] = sanitize_cr(before_client) # If the client does not exist yet, before_client is still empty if before_client == dict(): if state == 'absent': # do nothing and exit if module._diff: result['diff'] = dict(before='', after='') result['msg'] = 'Client does not exist, doing nothing.' module.exit_json(**result) # create new client result['changed'] = True if 'clientId' not in updated_client: module.fail_json(msg='client_id needs to be specified when creating a new client') if module._diff: result['diff'] = dict(before='', after=sanitize_cr(updated_client)) if module.check_mode: module.exit_json(**result) kc.create_client(updated_client, realm=realm) after_client = kc.get_client_by_clientid(updated_client['clientId'], realm=realm) result['end_state'] = sanitize_cr(after_client) result['msg'] = 'Client %s has been created.' 
% updated_client['clientId'] module.exit_json(**result) else: if state == 'present': # update existing client result['changed'] = True if module.check_mode: # We can only compare the current client with the proposed updates we have if module._diff: result['diff'] = dict(before=sanitize_cr(before_client), after=sanitize_cr(updated_client)) result['changed'] = (before_client != updated_client) module.exit_json(**result) kc.update_client(cid, updated_client, realm=realm) after_client = kc.get_client_by_id(cid, realm=realm) if before_client == after_client: result['changed'] = False if module._diff: result['diff'] = dict(before=sanitize_cr(before_client), after=sanitize_cr(after_client)) result['end_state'] = sanitize_cr(after_client) result['msg'] = 'Client %s has been updated.' % updated_client['clientId'] module.exit_json(**result) else: # Delete existing client result['changed'] = True if module._diff: result['diff']['before'] = sanitize_cr(before_client) result['diff']['after'] = '' if module.check_mode: module.exit_json(**result) kc.delete_client(cid, realm=realm) result['proposed'] = dict() result['end_state'] = dict() result['msg'] = 'Client %s has been deleted.' % before_client['clientId'] module.exit_json(**result) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
-5,134,648,339,766,925,000
37.793802
146
0.607134
false
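The keycloak_client module above maps its snake_cased option names onto the camelCase names used by the Keycloak REST API through the camel() helper it imports from module_utils. The following is a minimal illustrative sketch of that conversion, not the module's actual helper:

def camel(words):
    # snake_case -> camelCase, e.g. 'client_authenticator_type' -> 'clientAuthenticatorType'
    parts = words.split('_')
    return parts[0] + ''.join(part.capitalize() for part in parts[1:])

print(camel('client_authenticator_type'))  # clientAuthenticatorType
print(camel('redirect_uris'))              # redirectUris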
fparrel/regepe
wamp-src/cgi-bin/mathutil.py
2
13205
from math import sqrt,cos,sin,atan2,ceil,floor,log10,pi,atan,tan from log import Warn try: from math import fsum except ImportError: from mymath import fsum ## MISC MATH FUNCTIONS ## def Mean(numbers): "Returns the arithmetic mean of a numeric list." return fsum(numbers) / len(numbers) def InBounds(x,a,b): "Returns x if x belongs to [a,b] else return the closest bound." if x<a: return a elif x>b: return b else: return x def ApplyThreshold(x,threshold): "Apply threshold on x" if abs(x)>threshold: if x<0: return -threshold else: return threshold else: return x def IdentIfPositive(x): "If x>0 return x else return 0" if x>0: return x else: return 0 def sign(x): "Returns x/abs(x)" if x<0: return -1 if x>0: return 1 return 0 def Filter(data,FilterFunc,halfsize): "Apply a filter function on a list of data." maxid = len(data)-1 return [FilterFunc(data[InBounds(x-halfsize,0,maxid):InBounds(x+halfsize,0,maxid)]) for x in range(0,len(data))] def MeanXY(datax,datay): "2 dimension Mean for using with a filter" #return (datax[0],Mean(datay)) return (Mean(datax),Mean(datay)) def FilterXY(datax,datay,FilterFunc,xsize): "Apply 2 dimension filter on data" j = 0 outx = [] outy = [] for i in range(1,len(datax)): if datax[i]-datax[j]>=xsize or i==len(datax)-1: (x,y) = FilterFunc(datax[j:i+1],datay[j:i+1]) if j==0: x = datax[0] if i==len(datax)-1: x = datax[len(datax)-1] outx.append(x) outy.append(y) j = i #print((outx,outy)) return (outx,outy) def FindLocalExtremums(y): "Find local extremums from a list of floats, return two lists of [x,y[x]] (localmins and localmaxs)" d = 0 # variation of function: 0 if stable, +1 if increasing, -1 if decreasing localmins = [] # list of [id,value] of local minimums found localmaxs = [] # local maximums found for x in range(0,len(y)-1): if y[x+1]>y[x] and d!=1: # \/ or _/-> local minimum localmins.append([x,y[x]]) d = 1 if y[x+1]<y[x] and d!=-1: # _ # /\ or \-> local maximum localmaxs.append([x,y[x]]) d = -1 if y[x+1]==y[x] and d!=0: if d==-1: # \_ -> local minimum localmins.append([x,y[x]]) if d==1: # _ # / -> local maximum localmaxs.append([x,y[x]]) d = 0 return (localmins,localmaxs) def FindLocalExtremums2(y): "Find local extremums from a list of floats, return two lists of [x,y[x]] (localmins and localmaxs)" d = 0 # variation of function: 0 if stable, +1 if increasing, -1 if decreasing locextremums = [] # list of [id,type] of local extremums found for x in range(0,len(y)-1): if y[x+1]>y[x] and d!=1: # \/ or _/-> local minimum locextremums.append([x,'min']) d = 1 if y[x+1]<y[x] and d!=-1: # _ # /\ or \-> local maximum locextremums.append([x,'max']) d = -1 if y[x+1]==y[x] and d!=0: if d==-1: # \_ -> local minimum locextremums.append([x,'min']) if d==1: # _ # / -> local maximum locextremums.append([x,'max']) d = 0 return locextremums def FindLocalMaximums(points,key,FilterFunc,filterhalfsize): "Find local maximums from a list of objects given a key, return a list of ids" y = list(map(key,points)) # Filter input data if FilterFunc==None: y_filtered = y else: y_filtered = Filter(list(map(key,points)),FilterFunc,filterhalfsize) # Find local mins and maxs (localmins,localmaxs) = FindLocalExtremums(y_filtered) # Remove doubloons when ___ but not # / \ /\__/\ #for i in range(0,len(localmax)-1): # if localmax[i+1][1] == localmax[i][1]: # # Remove filter side effect if FilterFunc!=None: for i in range(0,len(localmaxs)): if i==0: first = localmaxs[i][0]-filterhalfsize else: first = max(localmaxs[i][0]-filterhalfsize,localmaxs[i-1][0]+filterhalfsize) if i==len(localmaxs)-1: last_plus_1 
= localmaxs[i][0]+filterhalfsize+1 else: last_plus_1 = min(localmaxs[i][0]+filterhalfsize+1,localmaxs[i+1][0]-filterhalfsize) first = max(len(localmaxs),min(0,first)) last_plus_1 = max(len(localmaxs),min(0,last_plus_1)) xys = [[x,y[x]] for x in range(first,last_plus_1)] #xys = [[x,y[x]] for x in range(max(0,localmaxs[i][0]-filterhalfsize),min(localmaxs[i][0]+filterhalfsize+1,len(y_notfiltered)))] if len(xys)>0: xys.sort(key=lambda xy: xy[1],reverse=True) localmaxs[i] = xys[0] else: x = localmaxs[i][0] localmaxs[i] = [x,y[x]] # Sort extremums localmaxs.sort(key=lambda pt: pt[1],reverse=True) localmins.sort(key=lambda pt: pt[1],reverse=True) # Return ids of points matching local max return [mymax[0] for mymax in localmaxs] def GeodeticDist(lat1, lng1, lat2, lng2): return GeodeticDistVincenty(lat1, lng1, lat2, lng2) def GeodeticDistVincenty(lat1, lng1, lat2, lng2): # Vincenty formula (taken from geopy) with WGS-84 # Convert degrees to radians lat1 = lat1 * 0.0174532925199433 lng1 = lng1 * 0.0174532925199433 lat2 = lat2 * 0.0174532925199433 lng2 = lng2 * 0.0174532925199433 delta_lng = lng2 - lng1 reduced_lat1 = atan((1 - 0.00335281066474748071984552861852) * tan(lat1)) reduced_lat2 = atan((1 - 0.00335281066474748071984552861852) * tan(lat2)) sin_reduced1, cos_reduced1 = sin(reduced_lat1), cos(reduced_lat1) sin_reduced2, cos_reduced2 = sin(reduced_lat2), cos(reduced_lat2) lambda_lng = delta_lng lambda_prime = 2 * pi iter_limit = 20 #20 iterations max i = 0 while abs(lambda_lng - lambda_prime) > 10e-12 and i <= iter_limit: i += 1 sin_lambda_lng, cos_lambda_lng = sin(lambda_lng), cos(lambda_lng) sin_sigma = sqrt( (cos_reduced2 * sin_lambda_lng) ** 2 + (cos_reduced1 * sin_reduced2 - sin_reduced1 * cos_reduced2 * cos_lambda_lng) ** 2 ) if sin_sigma == 0: return 0 # Coincident points cos_sigma = ( sin_reduced1 * sin_reduced2 + cos_reduced1 * cos_reduced2 * cos_lambda_lng ) sigma = atan2(sin_sigma, cos_sigma) sin_alpha = ( cos_reduced1 * cos_reduced2 * sin_lambda_lng / sin_sigma ) cos_sq_alpha = 1 - sin_alpha ** 2 if cos_sq_alpha != 0: cos2_sigma_m = cos_sigma - 2 * ( sin_reduced1 * sin_reduced2 / cos_sq_alpha ) else: cos2_sigma_m = 0.0 # Equatorial line C = 0.00335281066474748071984552861852 / 16. * cos_sq_alpha * (4 + 0.00335281066474748071984552861852 * (4 - 3 * cos_sq_alpha)) lambda_prime = lambda_lng lambda_lng = ( delta_lng + (1 - C) * 0.00335281066474748071984552861852 * sin_alpha * ( sigma + C * sin_sigma * ( cos2_sigma_m + C * cos_sigma * ( -1 + 2 * cos2_sigma_m ** 2 ) ) ) ) if i > iter_limit: # Vincenty formula failed to converge => use great circle algorithm Warn("Vincenty formula failed to converge") return GeodeticDistGreatCircle(lat1, lng1, lat2, lng2) u_sq = cos_sq_alpha * (6378137.0 ** 2 - 6356752.3142 ** 2) / 6356752.3142 ** 2 A = 1 + u_sq / 16384. * ( 4096 + u_sq * (-768 + u_sq * (320 - 175 * u_sq)) ) B = u_sq / 1024. * (256 + u_sq * (-128 + u_sq * (74 - 47 * u_sq))) delta_sigma = ( B * sin_sigma * ( cos2_sigma_m + B / 4. * ( cos_sigma * ( -1 + 2 * cos2_sigma_m ** 2 ) - B / 6. 
* cos2_sigma_m * ( -3 + 4 * sin_sigma ** 2 ) * ( -3 + 4 * cos2_sigma_m ** 2 ) ) ) ) s = 6356752.3142 * A * (sigma - delta_sigma) return s def GeodeticDistGreatCircleBitSlower(lat1,lon1,lat2,lon2): lat1 = lat1 * 0.0174532925199433 lon1 = lon1 * 0.0174532925199433 lat2 = lat2 * 0.0174532925199433 lon2 = lon2 * 0.0174532925199433 sin_lat1, cos_lat1 = sin(lat1), cos(lat1) sin_lat2, cos_lat2 = sin(lat2), cos(lat2) delta_lng = lon2 - lon1 cos_delta_lng, sin_delta_lng = cos(delta_lng), sin(delta_lng) d = atan2(sqrt((cos_lat2 * sin_delta_lng) ** 2 + (cos_lat1 * sin_lat2 - sin_lat1 * cos_lat2 * cos_delta_lng) ** 2), sin_lat1 * sin_lat2 + cos_lat1 * cos_lat2 * cos_delta_lng) return 6372795.0 * d def GeodeticDistGreatCircle(lat1,lon1,lat2,lon2): "Compute distance between two points of the earth geoid (approximated to a sphere)" # convert inputs in degrees to radians lat1 = lat1 * 0.0174532925199433 lon1 = lon1 * 0.0174532925199433 lat2 = lat2 * 0.0174532925199433 lon2 = lon2 * 0.0174532925199433 # just draw a schema of two points on a sphere and two radius and you'll understand a = sin((lat2 - lat1)/2)**2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1)/2)**2 c = 2 * atan2(sqrt(a), sqrt(1-a)) # earth mean radius is 6371 km return 6372795.0 * c def GeodeticCourse(lat1,lon1,lat2,lon2): "Compute course from (lat1,lon1) to (lat2,lon2) Input is in degrees and output in degrees" # convert inputs in degrees to radians lat1 = lat1 * 0.0174532925199433 lon1 = lon1 * 0.0174532925199433 lat2 = lat2 * 0.0174532925199433 lon2 = lon2 * 0.0174532925199433 y = sin(lon2 - lon1) * cos(lat2) x = cos(lat1)*sin(lat2) - sin(lat1)*cos(lat2)*cos(lon2 - lon1) return (((atan2(y, x) * 180 / pi) + 360) % 360) def ComputeDiffAfter(data): "Return derivative of 'data'" return [data[x+1]-data[x] for x in range(0,len(data)-2)] def StrangeFilter(y): "Return a function made of segments linking sucessive extremums from the continuous function 'y'" (localmins,localmaxs) = FindLocalExtremums(y) localextremums = localmins + localmaxs localextremums.append([0,y[0]]) localextremums.append([len(y)-1,y[len(y)-1]]) localextremums.sort(key=lambda pt: pt[0]) val = y[0] out = [] j = 0 for i in range(0,len(y)): out.append(val) if localextremums[j+1][0]>localextremums[j][0]: val += (localextremums[j+1][1]-localextremums[j][1])/(localextremums[j+1][0]-localextremums[j][0]) if i==localextremums[j+1][0]: j = j + 1 return out def GetIndexOfClosestFromOrderedList(value,inputlist): "Return the id of the item in 'inputlist' closest to 'value'. 
'inputlist' must be ordered" i = 0 # loop until inputlist[i] < value < inputlist[i+1] (or end of inputlist) while i<len(inputlist) and inputlist[i] < value: i += 1 if i==len(inputlist): # all elements of inputlist are lower than value, return last id out = i-1 elif i>0: # if prev item is closer than current, return its id if value-inputlist[i-1]<inputlist[i]-value: out = i-1 else: out = i else: out = i assert(out>=0) assert(out<len(inputlist)) return out def GetIndexOfClosest(mylist,value): "Return the index of the item of 'mylist' that is the closest to 'value'" if len(mylist)<1: raise IndexError('List is empty') out_index = 0 min_dist = abs(mylist[out_index]-value) for current_index in range(0,len(mylist)): dist = abs(mylist[current_index]-value) if dist < min_dist: min_dist = dist out_index = current_index return out_index ## UNIT TEST CODE ## def main(): from timeit import timeit print(Mean([0.6,0.9,0.7])) print("great circle 1",GeodeticDistGreatCircleBitSlower(45.0,0.0,46.0,1.0),timeit("GeodeticDistGreatCircleBitSlower(45.0,0.0,46.0,1.0)",setup="from __main__ import GeodeticDistGreatCircleBitSlower")) print("great circle 2",GeodeticDistGreatCircle(45.0,0.0,46.0,1.0),timeit("GeodeticDistGreatCircle(45.0,0.0,46.0,1.0)",setup="from __main__ import GeodeticDistGreatCircle")) print("vincenty",GeodeticDistVincenty(45.0,0.0,46.0,1.0),timeit("GeodeticDistVincenty(45.0,0.0,46.0,1.0)",setup="from __main__ import GeodeticDistVincenty")) print("GeodeticDist",GeodeticDist(45.0,0.0,46.0,1.0)) if __name__ == '__main__': main()
gpl-3.0
-1,777,497,493,673,461,200
32.0125
203
0.56259
false
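The GeodeticDistGreatCircle function in mathutil.py above is the haversine formula evaluated on a sphere of radius 6372795 m. A minimal standalone sketch of the same computation (the coordinates are arbitrary example values):

from math import radians, sin, cos, atan2, sqrt

def great_circle_m(lat1, lon1, lat2, lon2, radius_m=6372795.0):
    # Haversine form of the central angle between two points, scaled by the sphere radius.
    lat1, lon1, lat2, lon2 = map(radians, (lat1, lon1, lat2, lon2))
    a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return radius_m * 2 * atan2(sqrt(a), sqrt(1 - a))

print(great_circle_m(45.0, 0.0, 46.0, 1.0))  # roughly 135-136 km, close to the Vincenty result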
edmundgentle/schoolscript
SchoolScript/bin/Debug/pythonlib/Lib/ctypes/test/test_values.py
3
3597
""" A testcase which accesses *values* in a dll. """ import unittest from ctypes import * import _ctypes_test class ValuesTestCase(unittest.TestCase): def test_an_integer(self): # This test checks and changes an integer stored inside the # _ctypes_test dll/shared lib. ctdll = CDLL(_ctypes_test.__file__) an_integer = c_int.in_dll(ctdll, "an_integer") x = an_integer.value self.assertEqual(x, ctdll.get_an_integer()) an_integer.value *= 2 self.assertEqual(x*2, ctdll.get_an_integer()) # To avoid test failures when this test is repeated several # times the original value must be restored an_integer.value = x self.assertEqual(x, ctdll.get_an_integer()) def test_undefined(self): ctdll = CDLL(_ctypes_test.__file__) self.assertRaises(ValueError, c_int.in_dll, ctdll, "Undefined_Symbol") class Win_ValuesTestCase(unittest.TestCase): """This test only works when python itself is a dll/shared library""" def test_optimizeflag(self): # This test accesses the Py_OptimizeFlag intger, which is # exported by the Python dll. # It's value is set depending on the -O and -OO flags: # if not given, it is 0 and __debug__ is 1. # If -O is given, the flag is 1, for -OO it is 2. # docstrings are also removed in the latter case. opt = c_int.in_dll(pydll, "Py_OptimizeFlag").value if __debug__: self.assertEqual(opt, 0) elif ValuesTestCase.__doc__ is not None: self.assertEqual(opt, 1) else: self.assertEqual(opt, 2) def test_frozentable(self): # Python exports a PyImport_FrozenModules symbol. This is a # pointer to an array of struct _frozen entries. The end of the # array is marked by an entry containing a NULL name and zero # size. # In standard Python, this table contains a __hello__ # module, and a __phello__ package containing a spam # module. class struct_frozen(Structure): _fields_ = [("name", c_char_p), ("code", POINTER(c_ubyte)), ("size", c_int)] FrozenTable = POINTER(struct_frozen) ft = FrozenTable.in_dll(pydll, "PyImport_FrozenModules") # ft is a pointer to the struct_frozen entries: items = [] for entry in ft: # This is dangerous. We *can* iterate over a pointer, but # the loop will not terminate (maybe with an access # violation;-) because the pointer instance has no size. if entry.name is None: break items.append((entry.name, entry.size)) import sys if sys.version_info[:2] >= (2, 3): expected = [("__hello__", 104), ("__phello__", -104), ("__phello__.spam", 104)] else: expected = [("__hello__", 100), ("__phello__", -100), ("__phello__.spam", 100)] self.assertEqual(items, expected) from ctypes import _pointer_type_cache del _pointer_type_cache[struct_frozen] def test_undefined(self): self.assertRaises(ValueError, c_int.in_dll, pydll, "Undefined_Symbol") if __name__ == '__main__': unittest.main()
gpl-2.0
5,324,689,836,292,118,000
38.875
95
0.542952
false
beni55/networkx
examples/drawing/giant_component.py
33
2084
#!/usr/bin/env python
"""
This example illustrates the sudden appearance of a
giant connected component in a binomial random graph.

Requires pygraphviz and matplotlib to draw.
"""
# Copyright (C) 2006-2008
#    Aric Hagberg <[email protected]>
#    Dan Schult <[email protected]>
#    Pieter Swart <[email protected]>
#    All rights reserved.
#    BSD license.

try:
    import matplotlib.pyplot as plt
except:
    raise

import networkx as nx
import math

try:
    from networkx import graphviz_layout
    layout = nx.graphviz_layout
except ImportError:
    print("PyGraphviz not found; drawing with spring layout; will be slow.")
    layout = nx.spring_layout

n = 150  # 150 nodes
# p value at which giant component (of size log(n) nodes) is expected
p_giant = 1.0 / (n - 1)
# p value at which graph is expected to become completely connected
p_conn = math.log(n) / float(n)

# the following range of p values should be close to the threshold
pvals = [0.003, 0.006, 0.008, 0.015]

region = 220  # for pylab 2x2 subplot layout
plt.subplots_adjust(left=0, right=1, bottom=0, top=0.95, wspace=0.01, hspace=0.01)
for p in pvals:
    G = nx.binomial_graph(n, p)
    pos = layout(G)
    region += 1
    plt.subplot(region)
    plt.title("p = %6.3f" % (p))
    nx.draw(G, pos,
            with_labels=False,
            node_size=10
            )
    # identify largest connected component
    Gcc = sorted(nx.connected_component_subgraphs(G), key=len, reverse=True)
    G0 = Gcc[0]
    nx.draw_networkx_edges(G0, pos,
                           with_labels=False,
                           edge_color='r',
                           width=6.0
                           )
    # show other connected components
    for Gi in Gcc[1:]:
        if len(Gi) > 1:
            nx.draw_networkx_edges(Gi, pos,
                                   with_labels=False,
                                   edge_color='r',
                                   alpha=0.3,
                                   width=5.0
                                   )
plt.savefig("giant_component.png")
plt.show()  # display
bsd-3-clause
4,740,078,087,520,301,000
27.547945
77
0.578215
false
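For reference, the two thresholds mentioned in the giant_component.py comments above work out as follows for n = 150 (a short worked check, not part of the original example):

import math

n = 150
p_giant = 1.0 / (n - 1)          # ~0.0067: a giant component is expected above this
p_conn = math.log(n) / float(n)  # ~0.0334: the graph is expected to become connected above this
print(p_giant, p_conn)           # the pvals list [0.003, 0.006, 0.008, 0.015] straddles p_giant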
execuc/LCInterlocking
panel/hingeswidget.py
1
5079
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***************************************************************************
# *                                                                         *
# *   Copyright (c) 2016 execuc                                             *
# *                                                                         *
# *   This file is part of LCInterlocking module.                           *
# *   LCInterlocking module is free software; you can redistribute it and/or*
# *   modify it under the terms of the GNU Lesser General Public            *
# *   License as published by the Free Software Foundation; either          *
# *   version 2.1 of the License, or (at your option) any later version.    *
# *                                                                         *
# *   This module is distributed in the hope that it will be useful,        *
# *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
# *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU      *
# *   Lesser General Public License for more details.                       *
# *                                                                         *
# *   You should have received a copy of the GNU Lesser General Public      *
# *   License along with this library; if not, write to the Free Software   *
# *   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,            *
# *   MA 02110-1301 USA                                                     *
# *                                                                         *
# ***************************************************************************

from lasercut.hingesproperties import GlobalLivingMaterialProperties
from lasercut.hingesproperties import HingesProperties
from panel.toolwidget import ParamWidget, WidgetValue


class GlobalLivingHingeWidget(ParamWidget):

    def __init__(self, global_properties):
        self.name = global_properties.name
        self.label = global_properties.label
        ParamWidget.__init__(self, global_properties)
        self.widget_list.extend([WidgetValue(type=float, name="thickness", show_name="Thickness", widget=None),
                                 WidgetValue(type=str, name="new_name", show_name="Flat part name", widget=None),
                                 WidgetValue(type=list, name="hinge_type", show_name="Type", widget=None,
                                             interval_value=[GlobalLivingMaterialProperties.HINGE_TYPE_ALTERNATE_DOUBLE]),
                                 WidgetValue(type=float, name="alternate_nb_hinge", show_name="Nb hinge per column",
                                             widget=None, interval_value=[1, 30], decimals=0, step=1,
                                             parent_name="hinge_type",
                                             parent_value=[GlobalLivingMaterialProperties.HINGE_TYPE_ALTERNATE_DOUBLE]),
                                 WidgetValue(type=float, name="occupancy_ratio", show_name="Hinges occupancy ratio",
                                             widget=None, interval_value=[0.1, 1.], decimals=4, step=0.05,
                                             parent_name="hinge_type",
                                             parent_value=[GlobalLivingMaterialProperties.HINGE_TYPE_ALTERNATE_DOUBLE]),
                                 WidgetValue(type=float, name="link_clearance", show_name="Clearance width",
                                             widget=None, interval_value=[0., 30.], decimals=4, step=0.05),
                                 WidgetValue(type=float, name="laser_beam_diameter", show_name="Laser beam diameter",
                                             widget=None, interval_value=[0., 30.], decimals=4, step=0.05),
                                 WidgetValue(type=bool, name="generate_solid", show_name="Generate solid",
                                             widget=None)])


class LivingHingeWidget(ParamWidget):

    def __init__(self, hingeProperties):
        self.name = hingeProperties.name
        ParamWidget.__init__(self, hingeProperties)
        self.widget_list.extend([WidgetValue(type=float, name="arc_inner_radius", show_name="Arc radius (inner)",
                                             widget=None),
                                 WidgetValue(type=float, name="arc_outer_radius", show_name="Arc radius (outer)",
                                             widget=None),
                                 WidgetValue(type=float, name="arc_length", show_name="Arc length", widget=None),
                                 WidgetValue(type=bool, name="reversed_angle", show_name="Reverse Angle", widget=None),
                                 WidgetValue(type=float, name="deg_angle", show_name="Angle (degree)", widget=None),
                                 WidgetValue(type=float, name="min_links_nb", show_name="Min. link", widget=None),
                                 WidgetValue(type=float, name="nb_link", show_name="Number link", widget=None,
                                             interval_value=[2, 300], decimals=0, step=1)
                                 ])
lgpl-2.1
-4,049,863,118,812,959,000
69.541667
127
0.492617
false
junbochen/pylearn2
pylearn2/gui/tangent_plot.py
44
1730
""" Code for plotting curves with tangent lines. """ __author__ = "Ian Goodfellow" try: from matplotlib import pyplot except Exception: pyplot = None from theano.compat.six.moves import xrange def tangent_plot(x, y, s): """ Plots a curve with tangent lines. Parameters ---------- x : list List of x coordinates. Assumed to be sorted into ascending order, so that the tangent lines occupy 80 percent of the horizontal space between each pair of points. y : list List of y coordinates s : list List of slopes """ assert isinstance(x, list) assert isinstance(y, list) assert isinstance(s, list) n = len(x) assert len(y) == n assert len(s) == n if pyplot is None: raise RuntimeError("Could not import pyplot, can't run this code.") pyplot.plot(x, y, color='b') if n == 0: pyplot.show() return pyplot.hold(True) # Add dummy entries so that the for loop can use the same code on every # entry if n == 1: x = [x[0] - 1] + x[0] + [x[0] + 1.] else: x = [x[0] - (x[1] - x[0])] + x + [x[-2] + (x[-1] - x[-2])] y = [0.] + y + [0] s = [0.] + s + [0] for i in xrange(1, n + 1): ld = 0.4 * (x[i] - x[i - 1]) lx = x[i] - ld ly = y[i] - ld * s[i] rd = 0.4 * (x[i + 1] - x[i]) rx = x[i] + rd ry = y[i] + rd * s[i] pyplot.plot([lx, rx], [ly, ry], color='g') pyplot.show() if __name__ == "__main__": # Demo by plotting a quadratic function import numpy as np x = np.arange(-5., 5., .1) y = 0.5 * (x ** 2) x = list(x) y = list(y) tangent_plot(x, y, x)
bsd-3-clause
6,418,200,293,736,956,000
21.467532
75
0.508092
false
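Another quick usage sketch for the tangent_plot function above, assuming the module is importable as pylearn2.gui.tangent_plot: a sine curve whose supplied slopes are its analytic derivative, cos(x).

import numpy as np
from pylearn2.gui.tangent_plot import tangent_plot

x = np.arange(0., 6.3, .3)
# The third argument is the list of slopes; for sin(x) the exact slope is cos(x).
tangent_plot(list(x), list(np.sin(x)), list(np.cos(x)))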
goFrendiAsgard/kokoropy
kokoropy/packages/sqlalchemy/dialects/sqlite/pysqlite.py
23
14980
# sqlite/pysqlite.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """ .. dialect:: sqlite+pysqlite :name: pysqlite :dbapi: sqlite3 :connectstring: sqlite+pysqlite:///file_path :url: http://docs.python.org/library/sqlite3.html Note that ``pysqlite`` is the same driver as the ``sqlite3`` module included with the Python distribution. Driver ------ When using Python 2.5 and above, the built in ``sqlite3`` driver is already installed and no additional installation is needed. Otherwise, the ``pysqlite2`` driver needs to be present. This is the same driver as ``sqlite3``, just with a different name. The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3`` is loaded. This allows an explicitly installed pysqlite driver to take precedence over the built in one. As with all dialects, a specific DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control this explicitly:: from sqlite3 import dbapi2 as sqlite e = create_engine('sqlite+pysqlite:///file.db', module=sqlite) Connect Strings --------------- The file specification for the SQLite database is taken as the "database" portion of the URL. Note that the format of a SQLAlchemy url is:: driver://user:pass@host/database This means that the actual filename to be used starts with the characters to the **right** of the third slash. So connecting to a relative filepath looks like:: # relative path e = create_engine('sqlite:///path/to/database.db') An absolute path, which is denoted by starting with a slash, means you need **four** slashes:: # absolute path e = create_engine('sqlite:////path/to/database.db') To use a Windows path, regular drive specifications and backslashes can be used. Double backslashes are probably needed:: # absolute path on Windows e = create_engine('sqlite:///C:\\\\path\\\\to\\\\database.db') The sqlite ``:memory:`` identifier is the default if no filepath is present. Specify ``sqlite://`` and nothing else:: # in-memory database e = create_engine('sqlite://') Compatibility with sqlite3 "native" date and datetime types ----------------------------------------------------------- The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and sqlite3.PARSE_COLNAMES options, which have the effect of any column or expression explicitly cast as "date" or "timestamp" will be converted to a Python date or datetime object. The date and datetime types provided with the pysqlite dialect are not currently compatible with these options, since they render the ISO date/datetime including microseconds, which pysqlite's driver does not. Additionally, SQLAlchemy does not at this time automatically render the "cast" syntax required for the freestanding functions "current_timestamp" and "current_date" to return datetime/date types natively. Unfortunately, pysqlite does not provide the standard DBAPI types in ``cursor.description``, leaving SQLAlchemy with no way to detect these types on the fly without expensive per-row type checks. 
Keeping in mind that pysqlite's parsing option is not recommended, nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES can be forced if one configures "native_datetime=True" on create_engine():: engine = create_engine('sqlite://', connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, native_datetime=True ) With this flag enabled, the DATE and TIMESTAMP types (but note - not the DATETIME or TIME types...confused yet ?) will not perform any bind parameter or result processing. Execution of "func.current_date()" will return a string. "func.current_timestamp()" is registered as returning a DATETIME type in SQLAlchemy, so this function still receives SQLAlchemy-level result processing. .. _pysqlite_threading_pooling: Threading/Pooling Behavior --------------------------- Pysqlite's default behavior is to prohibit the usage of a single connection in more than one thread. This is originally intended to work with older versions of SQLite that did not support multithreaded operation under various circumstances. In particular, older SQLite versions did not allow a ``:memory:`` database to be used in multiple threads under any circumstances. Pysqlite does include a now-undocumented flag known as ``check_same_thread`` which will disable this check, however note that pysqlite connections are still not safe to use in concurrently in multiple threads. In particular, any statement execution calls would need to be externally mutexed, as Pysqlite does not provide for thread-safe propagation of error messages among other things. So while even ``:memory:`` databases can be shared among threads in modern SQLite, Pysqlite doesn't provide enough thread-safety to make this usage worth it. SQLAlchemy sets up pooling to work with Pysqlite's default behavior: * When a ``:memory:`` SQLite database is specified, the dialect by default will use :class:`.SingletonThreadPool`. This pool maintains a single connection per thread, so that all access to the engine within the current thread use the same ``:memory:`` database - other threads would access a different ``:memory:`` database. * When a file-based database is specified, the dialect will use :class:`.NullPool` as the source of connections. This pool closes and discards connections which are returned to the pool immediately. SQLite file-based connections have extremely low overhead, so pooling is not necessary. The scheme also prevents a connection from being used again in a different thread and works best with SQLite's coarse-grained file locking. .. versionchanged:: 0.7 Default selection of :class:`.NullPool` for SQLite file-based databases. Previous versions select :class:`.SingletonThreadPool` by default for all SQLite databases. Using a Memory Database in Multiple Threads ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To use a ``:memory:`` database in a multithreaded scenario, the same connection object must be shared among threads, since the database exists only within the scope of that connection. The :class:`.StaticPool` implementation will maintain a single connection globally, and the ``check_same_thread`` flag can be passed to Pysqlite as ``False``:: from sqlalchemy.pool import StaticPool engine = create_engine('sqlite://', connect_args={'check_same_thread':False}, poolclass=StaticPool) Note that using a ``:memory:`` database in multiple threads requires a recent version of SQLite. 
Using Temporary Tables with SQLite ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Due to the way SQLite deals with temporary tables, if you wish to use a temporary table in a file-based SQLite database across multiple checkouts from the connection pool, such as when using an ORM :class:`.Session` where the temporary table should continue to remain after :meth:`.Session.commit` or :meth:`.Session.rollback` is called, a pool which maintains a single connection must be used. Use :class:`.SingletonThreadPool` if the scope is only needed within the current thread, or :class:`.StaticPool` is scope is needed within multiple threads for this case:: # maintain the same connection per thread from sqlalchemy.pool import SingletonThreadPool engine = create_engine('sqlite:///mydb.db', poolclass=SingletonThreadPool) # maintain the same connection across all threads from sqlalchemy.pool import StaticPool engine = create_engine('sqlite:///mydb.db', poolclass=StaticPool) Note that :class:`.SingletonThreadPool` should be configured for the number of threads that are to be used; beyond that number, connections will be closed out in a non deterministic way. Unicode ------- The pysqlite driver only returns Python ``unicode`` objects in result sets, never plain strings, and accommodates ``unicode`` objects within bound parameter values in all cases. Regardless of the SQLAlchemy string type in use, string-based result values will by Python ``unicode`` in Python 2. The :class:`.Unicode` type should still be used to indicate those columns that require unicode, however, so that non-``unicode`` values passed inadvertently will emit a warning. Pysqlite will emit an error if a non-``unicode`` string is passed containing non-ASCII characters. .. _pysqlite_serializable: Serializable isolation / Savepoints / Transactional DDL ------------------------------------------------------- In the section :ref:`sqlite_concurrency`, we refer to the pysqlite driver's assortment of issues that prevent several features of SQLite from working correctly. The pysqlite DBAPI driver has several long-standing bugs which impact the correctness of its transactional behavior. In its default mode of operation, SQLite features such as SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are non-functional, and in order to use these features, workarounds must be taken. The issue is essentially that the driver attempts to second-guess the user's intent, failing to start transactions and sometimes ending them prematurely, in an effort to minimize the SQLite databases's file locking behavior, even though SQLite itself uses "shared" locks for read-only activities. SQLAlchemy chooses to not alter this behavior by default, as it is the long-expected behavior of the pysqlite driver; if and when the pysqlite driver attempts to repair these issues, that will be more of a driver towards defaults for SQLAlchemy. The good news is that with a few events, we can implement transactional support fully, by disabling pysqlite's feature entirely and emitting BEGIN ourselves. This is achieved using two event listeners:: from sqlalchemy import create_engine, event engine = create_engine("sqlite:///myfile.db") @event.listens_for(engine, "connect") def do_connect(dbapi_connection, connection_record): # disable pysqlite's emitting of the BEGIN statement entirely. # also stops it from emitting COMMIT before any DDL. 
dbapi_connection.isolation_level = None @event.listens_for(engine, "begin") def do_begin(conn): # emit our own BEGIN conn.execute("BEGIN") Above, we intercept a new pysqlite connection and disable any transactional integration. Then, at the point at which SQLAlchemy knows that transaction scope is to begin, we emit ``"BEGIN"`` ourselves. When we take control of ``"BEGIN"``, we can also control directly SQLite's locking modes, introduced at `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_, by adding the desired locking mode to our ``"BEGIN"``:: @event.listens_for(engine, "begin") def do_begin(conn): conn.execute("BEGIN EXCLUSIVE") .. seealso:: `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_ - on the SQLite site `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker """ from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE from sqlalchemy import exc, pool from sqlalchemy import types as sqltypes from sqlalchemy import util import os class _SQLite_pysqliteTimeStamp(DATETIME): def bind_processor(self, dialect): if dialect.native_datetime: return None else: return DATETIME.bind_processor(self, dialect) def result_processor(self, dialect, coltype): if dialect.native_datetime: return None else: return DATETIME.result_processor(self, dialect, coltype) class _SQLite_pysqliteDate(DATE): def bind_processor(self, dialect): if dialect.native_datetime: return None else: return DATE.bind_processor(self, dialect) def result_processor(self, dialect, coltype): if dialect.native_datetime: return None else: return DATE.result_processor(self, dialect, coltype) class SQLiteDialect_pysqlite(SQLiteDialect): default_paramstyle = 'qmark' colspecs = util.update_copy( SQLiteDialect.colspecs, { sqltypes.Date: _SQLite_pysqliteDate, sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp, } ) if not util.py2k: description_encoding = None driver = 'pysqlite' def __init__(self, **kwargs): SQLiteDialect.__init__(self, **kwargs) if self.dbapi is not None: sqlite_ver = self.dbapi.version_info if sqlite_ver < (2, 1, 3): util.warn( ("The installed version of pysqlite2 (%s) is out-dated " "and will cause errors in some cases. Version 2.1.3 " "or greater is recommended.") % '.'.join([str(subver) for subver in sqlite_ver])) @classmethod def dbapi(cls): try: from pysqlite2 import dbapi2 as sqlite except ImportError as e: try: from sqlite3 import dbapi2 as sqlite # try 2.5+ stdlib name. 
except ImportError: raise e return sqlite @classmethod def get_pool_class(cls, url): if url.database and url.database != ':memory:': return pool.NullPool else: return pool.SingletonThreadPool def _get_server_version_info(self, connection): return self.dbapi.sqlite_version_info def create_connect_args(self, url): if url.username or url.password or url.host or url.port: raise exc.ArgumentError( "Invalid SQLite URL: %s\n" "Valid SQLite URL forms are:\n" " sqlite:///:memory: (or, sqlite://)\n" " sqlite:///relative/path/to/file.db\n" " sqlite:////absolute/path/to/file.db" % (url,)) filename = url.database or ':memory:' if filename != ':memory:': filename = os.path.abspath(filename) opts = url.query.copy() util.coerce_kw_type(opts, 'timeout', float) util.coerce_kw_type(opts, 'isolation_level', str) util.coerce_kw_type(opts, 'detect_types', int) util.coerce_kw_type(opts, 'check_same_thread', bool) util.coerce_kw_type(opts, 'cached_statements', int) return ([filename], opts) def is_disconnect(self, e, connection, cursor): return isinstance(e, self.dbapi.ProgrammingError) and \ "Cannot operate on a closed database." in str(e) dialect = SQLiteDialect_pysqlite
mit
1,461,469,480,222,139,000
38.734748
135
0.70494
false
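The pysqlite dialect docstring above describes working around the driver's transaction handling with two event listeners. Assembled into one runnable sketch (the file name and table are placeholders, and the raw-string execute assumes the pre-1.4 SQLAlchemy API this dialect file targets):

from sqlalchemy import create_engine, event

engine = create_engine("sqlite:///myfile.db")

@event.listens_for(engine, "connect")
def do_connect(dbapi_connection, connection_record):
    # Stop pysqlite from emitting BEGIN itself (and COMMIT before any DDL).
    dbapi_connection.isolation_level = None

@event.listens_for(engine, "begin")
def do_begin(conn):
    # Emit our own BEGIN so SERIALIZABLE isolation, SAVEPOINTs and
    # transactional DDL behave as expected.
    conn.execute("BEGIN")

with engine.begin() as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS t (x INTEGER)")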
AsimmHirani/ISpyPi
tensorflow/contrib/tensorflow-master/tensorflow/python/debug/wrappers/local_cli_wrapper_test.py
20
13116
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Unit tests for local command-line-interface debug wrapper session.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import shutil import tempfile from tensorflow.python.client import session from tensorflow.python.debug.cli import cli_shared from tensorflow.python.debug.cli import debugger_cli_common from tensorflow.python.debug.wrappers import local_cli_wrapper from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variables from tensorflow.python.platform import googletest class LocalCLIDebuggerWrapperSessionForTest( local_cli_wrapper.LocalCLIDebugWrapperSession): """Subclasses the wrapper class for testing. Overrides its CLI-related methods for headless testing environments. Inserts observer variables for assertions. """ def __init__(self, command_args_sequence, sess, dump_root=None): """Constructor of the for-test subclass. Args: command_args_sequence: (list of list of str) A list of arguments for the "run" command. sess: See the doc string of LocalCLIDebugWrapperSession.__init__. dump_root: See the doc string of LocalCLIDebugWrapperSession.__init__. """ local_cli_wrapper.LocalCLIDebugWrapperSession.__init__( self, sess, dump_root=dump_root, log_usage=False) self._command_args_sequence = command_args_sequence self._response_pointer = 0 # Observer variables. 
self.observers = { "debug_dumps": [], "tf_errors": [], "run_start_cli_run_numbers": [], "run_end_cli_run_numbers": [], } def _prep_cli_for_run_start(self): pass def _prep_cli_for_run_end(self, debug_dump, tf_error, passed_filter): self.observers["debug_dumps"].append(debug_dump) self.observers["tf_errors"].append(tf_error) def _launch_cli(self): if self._is_run_start: self.observers["run_start_cli_run_numbers"].append(self._run_call_count) else: self.observers["run_end_cli_run_numbers"].append(self._run_call_count) command_args = self._command_args_sequence[self._response_pointer] self._response_pointer += 1 try: self._run_handler(command_args) except debugger_cli_common.CommandLineExit as e: response = e.exit_token return response class LocalCLIDebugWrapperSessionTest(test_util.TensorFlowTestCase): def setUp(self): self._tmp_dir = tempfile.mktemp() self.v = variables.Variable(10.0, name="v") self.delta = constant_op.constant(1.0, name="delta") self.inc_v = state_ops.assign_add(self.v, self.delta, name="inc_v") self.ph = array_ops.placeholder(dtypes.float32, name="ph") self.xph = array_ops.transpose(self.ph, name="xph") self.m = constant_op.constant( [[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]], dtype=dtypes.float32, name="m") self.y = math_ops.matmul(self.m, self.xph, name="y") self.sess = session.Session() # Initialize variable. self.sess.run(self.v.initializer) def tearDown(self): ops.reset_default_graph() if os.path.isdir(self._tmp_dir): shutil.rmtree(self._tmp_dir) def testConstructWrapper(self): local_cli_wrapper.LocalCLIDebugWrapperSession( session.Session(), log_usage=False) def testConstructWrapperWithExistingEmptyDumpRoot(self): os.mkdir(self._tmp_dir) self.assertTrue(os.path.isdir(self._tmp_dir)) local_cli_wrapper.LocalCLIDebugWrapperSession( session.Session(), dump_root=self._tmp_dir, log_usage=False) def testConstructWrapperWithExistingNonEmptyDumpRoot(self): os.mkdir(self._tmp_dir) dir_path = os.path.join(self._tmp_dir, "foo") os.mkdir(dir_path) self.assertTrue(os.path.isdir(dir_path)) with self.assertRaisesRegexp( ValueError, "dump_root path points to a non-empty directory"): local_cli_wrapper.LocalCLIDebugWrapperSession( session.Session(), dump_root=self._tmp_dir, log_usage=False) def testConstructWrapperWithExistingFileDumpRoot(self): os.mkdir(self._tmp_dir) file_path = os.path.join(self._tmp_dir, "foo") open(file_path, "a").close() # Create the file self.assertTrue(os.path.isfile(file_path)) with self.assertRaisesRegexp(ValueError, "dump_root path points to a file"): local_cli_wrapper.LocalCLIDebugWrapperSession( session.Session(), dump_root=file_path, log_usage=False) def testRunsUnderDebugMode(self): # Test command sequence: run; run; run; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [[], [], []], self.sess, dump_root=self._tmp_dir) # run under debug mode twice. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) # Verify that the assign_add op did take effect. self.assertAllClose(12.0, self.sess.run(self.v)) # Assert correct run call numbers for which the CLI has been launched at # run-start and run-end. self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([1, 2], wrapped_sess.observers["run_end_cli_run_numbers"]) # Verify that the dumps have been generated and picked up during run-end. self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"])) # Verify that the TensorFlow runtime errors are picked up and in this case, # they should be both None. 
self.assertEqual([None, None], wrapped_sess.observers["tf_errors"]) def testRunInfoOutputAtRunEndIsCorrect(self): wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [[], [], []], self.sess, dump_root=self._tmp_dir) wrapped_sess.run(self.inc_v) run_info_output = wrapped_sess._run_info_handler([]) tfdbg_logo = cli_shared.get_tfdbg_logo() # The run_info output in the first run() call should contain the tfdbg logo. self.assertEqual(tfdbg_logo.lines, run_info_output.lines[:len(tfdbg_logo.lines)]) menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY] self.assertIn("list_tensors", menu.captions()) wrapped_sess.run(self.inc_v) run_info_output = wrapped_sess._run_info_handler([]) # The run_info output in the second run() call should NOT contain the logo. self.assertNotEqual(tfdbg_logo.lines, run_info_output.lines[:len(tfdbg_logo.lines)]) menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY] self.assertIn("list_tensors", menu.captions()) def testRunsUnderNonDebugMode(self): # Test command sequence: run -n; run -n; run -n; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-n"], ["-n"], ["-n"]], self.sess, dump_root=self._tmp_dir) # run three times. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(13.0, self.sess.run(self.v)) self.assertEqual([1, 2, 3], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"]) def testRunsUnderNonDebugThenDebugMode(self): # Test command sequence: run -n; run -n; run; run; # Do two NON_DEBUG_RUNs, followed by DEBUG_RUNs. wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-n"], ["-n"], [], []], self.sess, dump_root=self._tmp_dir) # run three times. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(13.0, self.sess.run(self.v)) self.assertEqual([1, 2, 3], wrapped_sess.observers["run_start_cli_run_numbers"]) # Here, the CLI should have been launched only under the third run, # because the first and second runs are NON_DEBUG. self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"])) self.assertEqual([None], wrapped_sess.observers["tf_errors"]) def testRunMultipleTimesWithinLimit(self): # Test command sequence: run -t 3; run; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-t", "3"], []], self.sess, dump_root=self._tmp_dir) # run three times. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(13.0, self.sess.run(self.v)) self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"])) self.assertEqual([None], wrapped_sess.observers["tf_errors"]) def testRunMultipleTimesOverLimit(self): # Test command sequence: run -t 3; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-t", "3"]], self.sess, dump_root=self._tmp_dir) # run twice, which is less than the number of times specified by the # command. 
wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(12.0, self.sess.run(self.v)) self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(0, len(wrapped_sess.observers["debug_dumps"])) self.assertEqual([], wrapped_sess.observers["tf_errors"]) def testRunMixingDebugModeAndMultpleTimes(self): # Test command sequence: run -n; run -t 2; run; run; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-n"], ["-t", "2"], [], []], self.sess, dump_root=self._tmp_dir) # run four times. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(14.0, self.sess.run(self.v)) self.assertEqual([1, 2], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([3, 4], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"])) self.assertEqual([None, None], wrapped_sess.observers["tf_errors"]) def testRuntimeErrorShouldBeCaught(self): wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [[], []], self.sess, dump_root=self._tmp_dir) # Do a run that should lead to an TensorFlow runtime error. wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0], [1.0], [2.0]]}) self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"]) self.assertEqual([1], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"])) # Verify that the runtime error is caught by the wrapped session properly. self.assertEqual(1, len(wrapped_sess.observers["tf_errors"])) tf_error = wrapped_sess.observers["tf_errors"][0] self.assertEqual("y", tf_error.op.name) def testRunTillFilterPassesShouldLaunchCLIAtCorrectRun(self): # Test command sequence: # run -f greater_than_twelve; run -f greater_than_twelve; run; wrapped_sess = LocalCLIDebuggerWrapperSessionForTest( [["-f", "v_greater_than_twelve"], ["-f", "v_greater_than_twelve"], []], self.sess, dump_root=self._tmp_dir) def v_greater_than_twelve(datum, tensor): return datum.node_name == "v" and tensor > 12.0 wrapped_sess.add_tensor_filter("v_greater_than_twelve", v_greater_than_twelve) # run five times. wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) wrapped_sess.run(self.inc_v) self.assertAllClose(15.0, self.sess.run(self.v)) self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"]) # run-end CLI should NOT have been launched for run #2 and #3, because only # starting from run #4 v becomes greater than 12.0. self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"]) self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"])) self.assertEqual([None, None], wrapped_sess.observers["tf_errors"]) if __name__ == "__main__": googletest.main()
apache-2.0
3,531,258,227,906,135,600
37.576471
80
0.685499
false
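The test above drives LocalCLIDebugWrapperSession headlessly by overriding its CLI hooks: a pre-recorded sequence of "run" arguments is replayed, and every interesting event is appended to an observers dict that the assertions inspect later. The snippet below is a framework-free sketch of that scripted test-double pattern; the names (ScriptedDouble, launch) are hypothetical and not TensorFlow APIs.

class ScriptedDouble(object):
    # Replays canned responses in order and records every interaction so a
    # test can assert on what happened, mirroring the observers dict above.
    def __init__(self, canned_responses):
        self._responses = list(canned_responses)
        self._pointer = 0
        self.observers = {"calls": []}

    def launch(self, prompt):
        self.observers["calls"].append(prompt)
        response = self._responses[self._pointer]
        self._pointer += 1
        return response

double = ScriptedDouble([["-n"], ["-t", "3"], []])
assert double.launch("run-start #1") == ["-n"]
assert double.launch("run-start #2") == ["-t", "3"]
assert double.observers["calls"] == ["run-start #1", "run-start #2"]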
mcreenan/eve
eve/logging.py
17
1323
from __future__ import absolute_import import logging from flask import request # TODO right now we are only logging exceptions. We should probably # add support for some INFO and maybe DEBUG level logging (like, log each time # a endpoint is hit, etc.) class RequestFilter(logging.Filter): """ Adds Flask's request metadata to the log record so handlers can log this information too. import logging handler = logging.FileHandler('app.log') handler.setFormatter(logging.Formatter( '%(asctime)s %(levelname)s: %(message)s ' '[in %(filename)s:%(lineno)d] -- ip: %(clientip)s url: %(url)s')) app.logger.addHandler(handler) The above example adds 'clientip' and request 'url' to every log record. Note that the app.logger can also be used by callback functions. def log_a_get(resoure, request, payload): app.logger.info('we just responded to a GET request!') app = Eve() app.on_post_GET += log_a_get .. versionadded:: 0.6 """ def filter(self, record): if request: record.clientip = request.remote_addr record.url = request.url record.method = request.method else: record.clientip = None record.url = None record.method = None return True
bsd-3-clause
3,074,480,693,844,551,700
27.76087
78
0.643235
false
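RequestFilter above injects Flask request metadata onto every log record so a Formatter can reference %(clientip)s and %(url)s. The standalone sketch below shows the same mechanism with the standard logging module only, using fixed placeholder values where the real filter reads flask.request; it is illustrative and not part of Eve.

import logging

class _ContextFilter(logging.Filter):
    # Adds extra attributes to each record so the formatter can use them.
    # The real RequestFilter reads flask.request; placeholders are used here.
    def filter(self, record):
        record.clientip = "127.0.0.1"       # stands in for request.remote_addr
        record.url = "http://localhost/"    # stands in for request.url
        return True

logger = logging.getLogger("demo")
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    '%(asctime)s %(levelname)s: %(message)s -- ip: %(clientip)s url: %(url)s'))
handler.addFilter(_ContextFilter())
logger.addHandler(handler)
logger.warning("request handled")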
vybstat/scikit-learn
examples/bicluster/plot_spectral_biclustering.py
403
2011
""" ============================================= A demo of the Spectral Biclustering algorithm ============================================= This example demonstrates how to generate a checkerboard dataset and bicluster it using the Spectral Biclustering algorithm. The data is generated with the ``make_checkerboard`` function, then shuffled and passed to the Spectral Biclustering algorithm. The rows and columns of the shuffled matrix are rearranged to show the biclusters found by the algorithm. The outer product of the row and column label vectors shows a representation of the checkerboard structure. """ print(__doc__) # Author: Kemal Eren <[email protected]> # License: BSD 3 clause import numpy as np from matplotlib import pyplot as plt from sklearn.datasets import make_checkerboard from sklearn.datasets import samples_generator as sg from sklearn.cluster.bicluster import SpectralBiclustering from sklearn.metrics import consensus_score n_clusters = (4, 3) data, rows, columns = make_checkerboard( shape=(300, 300), n_clusters=n_clusters, noise=10, shuffle=False, random_state=0) plt.matshow(data, cmap=plt.cm.Blues) plt.title("Original dataset") data, row_idx, col_idx = sg._shuffle(data, random_state=0) plt.matshow(data, cmap=plt.cm.Blues) plt.title("Shuffled dataset") model = SpectralBiclustering(n_clusters=n_clusters, method='log', random_state=0) model.fit(data) score = consensus_score(model.biclusters_, (rows[:, row_idx], columns[:, col_idx])) print("consensus score: {:.1f}".format(score)) fit_data = data[np.argsort(model.row_labels_)] fit_data = fit_data[:, np.argsort(model.column_labels_)] plt.matshow(fit_data, cmap=plt.cm.Blues) plt.title("After biclustering; rearranged to show biclusters") plt.matshow(np.outer(np.sort(model.row_labels_) + 1, np.sort(model.column_labels_) + 1), cmap=plt.cm.Blues) plt.title("Checkerboard structure of rearranged data") plt.show()
bsd-3-clause
6,146,612,982,188,058,000
31.435484
68
0.696668
false
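The key step in the biclustering example above is reordering the shuffled matrix by argsort of the fitted row and column labels so each bicluster becomes a contiguous block. The numpy-only sketch below demonstrates that rearrangement with hand-made labels; it does not run the scikit-learn estimator, and the label arrays are invented for illustration.

import numpy as np

row_labels = np.array([1, 0, 1, 0])          # row-cluster assignment per row
col_labels = np.array([2, 0, 1, 0, 2, 1])    # column-cluster assignment per column
# Toy matrix whose value identifies the (row cluster, column cluster) pair.
data = 10.0 * row_labels[:, None] + col_labels[None, :]

# Sorting rows, then columns, by their labels groups each bicluster into a
# contiguous constant block, exactly like data[np.argsort(model.row_labels_)].
rearranged = data[np.argsort(row_labels)]
rearranged = rearranged[:, np.argsort(col_labels)]
print(rearranged)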
NickDaly/GemRB-MultipleConfigs
gemrb/GUIScripts/pst/Start.py
2
3127
# -*-python-*- # GemRB - Infinity Engine Emulator # Copyright (C) 2003 The GemRB Project # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # Start.py - intro and main menu screens ################################################### import GemRB from GUIDefines import * StartWindow = 0 QuitWindow = 0 def OnLoad(): global StartWindow, QuitWindow skip_videos = GemRB.GetVar ("SkipIntroVideos") if not skip_videos: GemRB.PlayMovie ("BISLOGO") GemRB.PlayMovie ("TSRLOGO") GemRB.PlayMovie ("OPENING") GemRB.SetVar ("SkipIntroVideos", 1) GemRB.LoadWindowPack("START") #quit subwindow QuitWindow = GemRB.LoadWindow(3) QuitTextArea = QuitWindow.GetControl(0) QuitTextArea.SetText(20582) ConfirmButton = QuitWindow.GetControl(1) ConfirmButton.SetText(23787) ConfirmButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, ExitConfirmed) ConfirmButton.SetFlags(IE_GUI_BUTTON_DEFAULT, OP_OR) CancelButton = QuitWindow.GetControl(2) CancelButton.SetText(23789) CancelButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, ExitCancelled) CancelButton.SetFlags(IE_GUI_BUTTON_CANCEL, OP_OR) #main window StartWindow = GemRB.LoadWindow(0) NewLifeButton = StartWindow.GetControl(0) ResumeLifeButton = StartWindow.GetControl(2) ExitButton = StartWindow.GetControl(3) NewLifeButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NewLifePress) ResumeLifeButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, ResumeLifePress) ExitButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, ExitPress) ExitButton.SetFlags(IE_GUI_BUTTON_CANCEL, OP_OR) StartWindow.CreateLabel(0x0fff0000, 0,415,640,30, "EXOFONT", "", 1) Label=StartWindow.GetControl(0x0fff0000) Label.SetText(GEMRB_VERSION) QuitWindow.SetVisible(WINDOW_INVISIBLE) StartWindow.SetVisible(WINDOW_VISIBLE) GemRB.LoadMusicPL("Main.mus") return def NewLifePress(): if QuitWindow: QuitWindow.Unload() if StartWindow: StartWindow.Unload() #to make difference between ingame change and new life GemRB.SetVar("PlayMode",0) GemRB.SetNextScript("NewLife") return def ResumeLifePress(): if QuitWindow: QuitWindow.Unload() if StartWindow: StartWindow.Unload() #to make difference between ingame load and initial load GemRB.SetVar("PlayMode",0) GemRB.SetNextScript("GUILOAD") return def ExitPress(): StartWindow.SetVisible(WINDOW_GRAYED) QuitWindow.SetVisible(WINDOW_VISIBLE) return def ExitConfirmed(): GemRB.Quit() return def ExitCancelled(): QuitWindow.SetVisible(WINDOW_INVISIBLE) StartWindow.SetVisible(WINDOW_VISIBLE) return
gpl-2.0
-7,505,186,259,014,147,000
27.953704
80
0.760793
false
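Start.py above is mostly event wiring: buttons are bound to callbacks and the quit-confirmation subwindow is shown, cancelled or confirmed. The engine-free sketch below models that show/cancel/confirm flow with plain Python so the control flow can be exercised without GemRB; every name in it is hypothetical.

class _MenuModel(object):
    # Minimal state machine for the main menu / quit-confirmation dialog.
    def __init__(self):
        self.main_visible = True
        self.quit_visible = False
        self.quit_requested = False

    def exit_press(self):          # like ExitPress: grey main window, show dialog
        self.main_visible = False
        self.quit_visible = True

    def exit_cancelled(self):      # like ExitCancelled: hide dialog, restore main
        self.quit_visible = False
        self.main_visible = True

    def exit_confirmed(self):      # like ExitConfirmed: request application quit
        self.quit_requested = True

menu = _MenuModel()
menu.exit_press()
menu.exit_cancelled()
assert menu.main_visible and not menu.quit_requested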
sander76/home-assistant
homeassistant/components/iaqualink/switch.py
12
1713
"""Support for Aqualink pool feature switches.""" from homeassistant.components.switch import DOMAIN, SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.typing import HomeAssistantType from . import AqualinkEntity, refresh_system from .const import DOMAIN as AQUALINK_DOMAIN PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities ) -> None: """Set up discovered switches.""" devs = [] for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]: devs.append(HassAqualinkSwitch(dev)) async_add_entities(devs, True) class HassAqualinkSwitch(AqualinkEntity, SwitchEntity): """Representation of a switch.""" @property def name(self) -> str: """Return the name of the switch.""" return self.dev.label @property def icon(self) -> str: """Return an icon based on the switch type.""" if self.name == "Cleaner": return "mdi:robot-vacuum" if self.name == "Waterfall" or self.name.endswith("Dscnt"): return "mdi:fountain" if self.name.endswith("Pump") or self.name.endswith("Blower"): return "mdi:fan" if self.name.endswith("Heater"): return "mdi:radiator" @property def is_on(self) -> bool: """Return whether the switch is on or not.""" return self.dev.is_on @refresh_system async def async_turn_on(self, **kwargs) -> None: """Turn on the switch.""" await self.dev.turn_on() @refresh_system async def async_turn_off(self, **kwargs) -> None: """Turn off the switch.""" await self.dev.turn_off()
apache-2.0
6,473,368,994,462,506,000
30.145455
74
0.645067
false
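The icon property above chooses a Material Design icon purely from the switch's label. Below, the same naming heuristics are re-expressed as a standalone function with a few checks; this is an illustration only (the _icon_for_label name is invented) and not part of the Home Assistant integration.

def _icon_for_label(name):
    # Same label-based heuristics as HassAqualinkSwitch.icon, as a plain function.
    if name == "Cleaner":
        return "mdi:robot-vacuum"
    if name == "Waterfall" or name.endswith("Dscnt"):
        return "mdi:fountain"
    if name.endswith("Pump") or name.endswith("Blower"):
        return "mdi:fan"
    if name.endswith("Heater"):
        return "mdi:radiator"
    return None  # fall back to the default switch icon

assert _icon_for_label("Spa Heater") == "mdi:radiator"
assert _icon_for_label("Aux Pump") == "mdi:fan"
assert _icon_for_label("Garden Light") is None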
lupien/pyHegel
pyHegel/instruments_base.py
1
162990
# -*- coding: utf-8 -*- ########################## Copyrights and license ############################ # # # Copyright 2011-2015 Christian Lupien <[email protected]> # # # # This file is part of pyHegel. http://github.com/lupien/pyHegel # # # # pyHegel is free software: you can redistribute it and/or modify it under # # the terms of the GNU Lesser General Public License as published by the # # Free Software Foundation, either version 3 of the License, or (at your # # option) any later version. # # # # pyHegel is distributed in the hope that it will be useful, but WITHOUT # # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # # License for more details. # # # # You should have received a copy of the GNU Lesser General Public License # # along with pyHegel. If not, see <http://www.gnu.org/licenses/>. # # # ############################################################################## from __future__ import absolute_import import numpy as np import string import functools import ctypes import hashlib import os import signal import sys import time import inspect import thread import threading import weakref from collections import OrderedDict # this is a subclass of dict from .qt_wrap import processEvents_managed, sleep from .kbint_util import _sleep_signal_context_manager, _delayed_signal_context_manager from . import visa_wrap from . import instruments_registry from .types import dict_improved rsrc_mngr = None def _load_resource_manager(path=None): global rsrc_mngr rsrc_mngr = None rsrc_mngr = visa_wrap.get_resource_manager(path) try: _load_resource_manager() except ImportError as exc: print 'Error loading visa resource manager. You will have reduced functionality.' try: _globaldict # keep the previous values (when reloading this file) except NameError: _globaldict = {} # This is set in pyHegel _init_pyHegel_globals (from pyHegel.commands) class _CHECKING(): def __init__(self): self.state = False def get(self): return self.state def set(self, state): if not isinstance(state, bool): raise ValueError('The state needs to be a boolean') self.state = state def __call__(self, state=None): """ Called with no arguments, returns current checking mode state With a boolean, sets the check state """ if state is None: return self.get() else: self.set(state) CHECKING = _CHECKING() ################### ### New exceptions class InvalidArgument(ValueError): pass class InvalidAutoArgument(InvalidArgument): pass class KeyError_Choices (KeyError): pass class Runtime_Get_Para_Checked(Exception): """ This exception is to be used to mark the end of parameter checking in a get function """ pass def get_para_checked(*val): """ This function should be called in a _getdev after the parameters have been checked for validity. When in CHECKING only mode, this will skip the rest of the function. 
you should call this with one parameter (passed to exception) or no parameters When a parameter is given, it will be used as the get value (and cached) """ if CHECKING(): raise Runtime_Get_Para_Checked(*val) ################### class ProxyMethod(object): def __init__(self, bound_method): #self.class_of_method = bound_method.im_class self.instance = weakref.proxy(bound_method.__self__) self.unbound_func = bound_method.__func__ def __call__(self, *arg, **kwarg): return self.unbound_func(self.instance, *arg, **kwarg) ####################################################### ## Have a status line active ####################################################### class time_check(object): def __init__(self, delay=10): self.delay = delay self.restart() def restart(self): self.last_update = time.time() def check(self): now = time.time() if now >= self.last_update + self.delay: self.last_update = now return True return False def __call__(self): return self.check() class UserStatusLine(object): """ The is the object created by MainStatusLine.new You should not create it directly. To use, just call the object with the new string. If the new string is not empty, the status line is also output. You can force an output using the method output. The timed, when True or a time in s (True is equivalent to 10s), makes the screen update slower than that time. """ def __init__(self, main, handle, timed=False): self.main = main self.handle = handle if timed is not None and timed is not False: if timed is True: self._time_check = time_check() else: self._time_check = time_check(timed) else: self._time_check = None @property def delay(self): if self._time_check is not None: return self._time_check.delay return 0 @delay.setter def delay(self, d): if self._time_check is not None: self._time_check.delay = d def restart_time(self): if self._time_check is not None: self._time_check.restart() def check_time(self): if self._time_check is not None: return self._time_check() return True def remove(self): self.main.delete(self.handle) def __del__(self): self.remove() def __call__(self, new_status=''): self.main.change(self.handle, new_status) do_update = self.check_time() if new_status != '' and do_update: self.main.output() def output(self): self.main.output() def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_traceback): self.remove() class UserStatusLine_dummy(object): """ This is a dummy UserStatusLine so code can be more general. """ def __init__(self, main, handle, timed=False): self.delay = 0. def restart_time(self): pass def check_time(self): return True def __call__(self, new_status=''): pass def remove(self): pass def output(self): pass def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_traceback): pass class MainStatusLine(object): """ This class provides a tools for combining multiple strings in a status line. The status line the next line on the console which we keep rewriting (using a carriage return). To use, create a new user object (it will properly clean itself on deletion) using a single instance of this class (so should use: mainStatusLine.new()). You can select the priority you want for the status. Larger priority will show before lower ones. You can also put a limit to the update rate with timed (which is passed to UserStatusLine). For information on using the user object see UserStatusLine Can use attribute enable to turn off the status line display Can also use the return object (from new) as a context manager to make sure it is properly cleaned. 
""" def __init__(self): self.last_handle = 0 self.users = {} self.enable = True self._lock = threading.Lock() self._dummy = UserStatusLine_dummy(self, 0) def new(self, priority=1, timed=False, dummy=False): if dummy: return self._dummy with self._lock: handle = self.last_handle + 1 self.last_handle = handle self.users[handle] = [priority, ''] return UserStatusLine(self, handle, timed) # higher priority shows before lower ones def delete(self, handle): with self._lock: if handle in self.users: del self.users[handle] def change(self, handle, new_status): # This locking might not be necessary but lets do it to be sure. with self._lock: self.users[handle][1] = new_status def output(self): if not self.enable: return # This locking might not be necessary but lets do it to be sure. with self._lock: entries = self.users.values() entries = sorted(entries, key=lambda x: x[0], reverse=True) # sort on decreasing priority only outstr = ' '.join([e[1] for e in entries if e[1] != '']) # join the non-empty status outstr = outstr if len(outstr)<=72 else outstr[:69]+'...' sys.stdout.write('\r%-72s'%outstr) sys.stdout.flush() mainStatusLine = MainStatusLine() def wait(sec, progress_base='Wait', progress_timed=True): """ Time to wait in seconds. It can be stopped with CTRL-C, and it should update the GUI while waiting. if progress_base is None, status line update will be disabled. """ if progress_base is None or sec < 1: sleep(sec) return progress_base += ' {:.1f}/%.1f'%sec to = time.time() with mainStatusLine.new(priority=100, timed=progress_timed) as progress: while True: dif = time.time() - to delay = min(sec - dif, .1) if delay <= 0: break sleep(delay) progress(progress_base.format(dif)) ####################################################### ## find_all_instruments function (for VISA) ####################################################### #can list instruments with : visa.get_instruments_list() # or : visa.get_instruments_list(use_aliases=True) # Based on visa.get_instruments_list def find_all_instruments(use_aliases=True): """Get a list of all connected devices. Parameters: use_aliases -- if True, return an alias name for the device if it has one. Otherwise, always return the standard resource name like "GPIB::10". Return value: A list of strings with the names of all connected devices, ready for being used to open each of them. """ return rsrc_mngr.get_instrument_list(use_aliases) def test_gpib_srq_state(bus=0): """ Test the state of the gpib bus SRQ line. It should normally be False unless an instrument is in the process of communicating. If it is ever True and stays that way, it will prevent further use of the line by any other device. It can be caused by an instrument on the bus that is not openned in any session but that is activating the srq line. Either open that device and clear it or turn it off. """ return rsrc_mngr.get_gpib_intfc_srq_state() def _repr_or_string(val): if isinstance(val, basestring): return val else: return repr(val) def _writevec_flatten_list(vals_list): ret = [] for val in vals_list: if isinstance(val, np.ndarray): ret.extend(list(val.flatten())) elif isinstance(val, (list, tuple)): ret.extend(val) else: ret.append(val) return ret def _writevec(file_obj, vals_list, pre_str=''): """ write a line of data in the open file_obj. vals_list is a list of values or strings, or of np.ndarray which are flatten. Any value that is not a base_string is converted to a string use repr. The columns in the file are separated by tabs. pre_str is prepended to every line. 
Can use '#' when adding comments. """ vals_list = _writevec_flatten_list(vals_list) strs_list = map(_repr_or_string, vals_list) file_obj.write(pre_str+'\t'.join(strs_list)+'\n') def _get_conf_header_util(header, obj, options): if callable(header): header = header(obj, options) if header: # if either is not None or not '' if isinstance(header, basestring): header=[header] return header # header or header() can be None, '' or False for no output # otherwise it can be a single string for a single line or # a list of strings. Don't include the comment character or the newline. def _get_conf_header(format): header = format['header'] obj = format['obj'] options = format['options'] return _get_conf_header_util(header, obj, options) def _replace_ext(filename, newext=None): if newext is None: return filename root, ext = os.path.splitext(filename) return root+newext def _write_dev(val, filename, format=format, first=False): append = format['append'] bin = format['bin'] dev = format['obj'] multi = format['multi'] extra_conf = format['extra_conf'] doheader = True if bin: doheader = False if append and not first: open_mode = 'a' doheader = False else: open_mode = 'w' if bin: open_mode += 'b' if bin != '.ext': filename = _replace_ext(filename, bin) f=open(filename, open_mode) dev._last_filename = filename header = _get_conf_header(format) if doheader: # if either is not None or not '' if header: for h in header: f.write('#'+h+'\n') if extra_conf: # not None or '' # extra_conf should be a complete string including # and new lines f.write(extra_conf) if isinstance(multi, tuple): _writevec(f, multi, pre_str='#') if append: _writevec(f, val) else: # we assume val is array like, except for bin where it can also be a string # remember that float64 has 53 bits (~16 digits) of precision # for v of shape (2,100) this will output 2 columns and 100 lines # because of .T if bin == '.npy': np.save(f, val) elif bin =='.npz': np.savez_compressed(f, val) elif bin: if isinstance(val, basestring): f.write(val) else: val.tofile(f) else: # force array so single values and lists also work val = np.atleast_1d(val) np.savetxt(f, val.T, fmt='%.18g', delimiter='\t') f.close() def _retry_wait(func, timeout, delay=0.01, progress_base='Wait', progress_timed=True, keep_delay=False): """ this calls func() and stops when the return value is True or timeout seconds have passed. delay is the sleep duration between attempts. progress_base is prefix when using status line (for timeout >= 1s) when set to None, status line update is disabled. progress_timed is mainStatusLine.new timed option. keep_delay when False, will increase the delay to 20 ms (if smaller) after .5s of wait to make sure to update the graphics. """ ret = False dummy = (timeout < 1.) 
or (progress_base is None) with mainStatusLine.new(priority=100, timed=progress_timed, dummy=dummy) as progress: to = time.time() endtime = to + timeout if progress_base is not None: progress_base = progress_base + ' %.1f/{:.1f}'.format(timeout) while True: ret = func() if ret: break now = time.time() duration = now - to remaining = endtime - now if remaining <= 0: break if progress_base is not None: progress(progress_base%duration) if duration>.5 and not keep_delay: delay = max(delay, 0.02) keep_delay = True delay = min(delay, remaining) sleep(delay) return ret class Lock_Extra(object): def acquire(self): return False __enter__ = acquire def release(self): pass def __exit__(self, exc_type, exc_value, exc_traceback): self.release() def is_owned(self): return False def force_release(self): pass class Lock_Instruments(threading._RLock): """ This is similar to threading.RLock (reentrant lock) except acquire always waits in a non-blocking state. Therefore you can press CTRL-C to stop the wait. However if the other threads does not release the lock for long enough, we might never be able to acquire it. """ def acquire_timeout(self, timeout): func = lambda : super(Lock_Instruments, self).acquire(blocking=0) return _retry_wait(func, timeout, delay=0.001) def acquire(self): return wait_on_event(self.acquire_timeout) __enter__ = acquire def is_owned(self): return self._is_owned() def force_release(self): n = 0 try: while True: self.release() n += 1 except RuntimeError as exc: if exc.message != "cannot release un-acquired lock": raise if n: print 'Released Intrument lock', n, 'time(s)' else: print 'Instrument lock was not held' try: self._RLock__block.release() except thread.error as exc: if exc.message != 'release unlocked lock': raise else: print 'Inner lock was still locked, now released.' # This functions was moved out of _locked_calling_helper # because it was causing different errors in python < 2.7.9 # SyntaxError: unqualified exec is not allowed in function 'locked_calling' it contains a nested function with free variables # see https://bugs.python.org/issue21591 # https://stackoverflow.com/questions/4484872/why-doesnt-exec-work-in-a-function-with-a-subfunction # However fixing that (by using the "exec something in lcl" syntax) leads to another error: # SyntaxError: function 'locked_calling' uses import * and bare exec, which are illegal because it contains a nested function with free variables # which is because I kept the exec in an if else statement. 
(I need to keep the exec in function form for # future upgrade to python 3) def _locked_calling_helper(argspec, extra): (args, varargs, varkw, defaults) = argspec # find and replace class (like float), functions in defaults # will use obj.__name__ but could also try to find the object name in the # calling locals, globals, __builtin__ if defaults is not None: defaults_repl = [(d, d.__name__) for d in defaults if getattr(d, '__name__', None)] else: defaults_repl = [] defaults_repl_obj = [d[0] for d in defaults_repl] def def_repl_func(obj): try: ind = defaults_repl_obj.index(obj) except ValueError: return '='+repr(obj) return '='+defaults_repl[ind][1] def_arg = inspect.formatargspec(*argspec, formatvalue=def_repl_func) # this is: (self, arg1, arg2, kw1=1, kw2=5, *arg, *kwarg) use_arg = inspect.formatargspec(*argspec, formatvalue=lambda name: '') # this is: (self, arg1, arg2, kw1, kw2, *arg, *kwarg) selfname = args[0]+extra return dict(def_arg=def_arg, use_arg=use_arg, self=selfname) # Use this as a decorator def locked_calling(func, extra=''): """ This function is to be used as a decorator on a class method. It will wrap func with with self._lock_instrument, self._lock_extra: Only use on method in classes derived from BaseInstrument """ argspec = inspect.getargspec(func) frmt_para = _locked_calling_helper(argspec, extra) def_str = """ @functools.wraps(func) def locked_call_wrapper{def_arg}: " locked_call_wrapper is a wrapper that executes func with the instrument locked." with {self}._lock_instrument, {self}._lock_extra: return func{use_arg} """.format(**frmt_para) lcl = {} lcl.update(func=func) lcl.update(functools=functools) # lcl is uses as both globals and locals exec(def_str, lcl) locked_call_wrapper = lcl['locked_call_wrapper'] ### only for ipython 0.12 ### This makes newfunc?? show the correct function def (including decorator) ### note that for doc, ipython tests for getdoc method locked_call_wrapper.__wrapped__ = func return locked_call_wrapper def locked_calling_dev(func): """ Same as locked_calling, but for a BaseDevice subclass. 
""" return locked_calling(func, extra='.instr') class release_lock_context(object): def __init__(self, instr): self.instr = instr self.n = 0 def __enter__(self): self.n = 0 try: while True: self.instr._lock_release() self.n += 1 except RuntimeError as exc: if exc.message != "cannot release un-acquired lock": raise return self def __exit__(self, exc_type, exc_value, exc_traceback): for i in range(self.n): self.instr._lock_acquire() # Taken from python threading 2.7.2 class FastEvent(threading._Event): def __init__(self, verbose=None): threading._Verbose.__init__(self, verbose) self._Event__cond = FastCondition(threading.Lock()) self._Event__flag = False class FastCondition(threading._Condition): def wait(self, timeout=None, balancing=True): # Newer version of threading have added balencing # the old code is the same as balencing=True which is implemented here if balancing is not True: raise NotImplementedError("FastCondition does not handle balancing other than True") if not self._is_owned(): raise RuntimeError("cannot wait on un-acquired lock") waiter = threading._allocate_lock() waiter.acquire() self._Condition__waiters.append(waiter) saved_state = self._release_save() try: # restore state no matter what (e.g., KeyboardInterrupt) if timeout is None: waiter.acquire() if __debug__: self._note("%s.wait(): got it", self) else: # Balancing act: We can't afford a pure busy loop, so we # have to sleep; but if we sleep the whole timeout time, # we'll be unresponsive. func = lambda : waiter.acquire(0) gotit = _retry_wait(func, timeout, delay=0.01) if not gotit: if __debug__: self._note("%s.wait(%s): timed out", self, timeout) try: self._Condition__waiters.remove(waiter) except ValueError: pass else: if __debug__: self._note("%s.wait(%s): got it", self, timeout) finally: self._acquire_restore(saved_state) #To implement async get: # need multi level get # 0: is initialization (Telling system what to read and to prepare it if necessary) # dmm1 could do :init here if bus/ext trigger # Also start one or multiple threads to capture and save data # Should turn on a flag saying we are busy # Be carefull with locking if more than one thread per instrument # setup srq listening in init or here # The end of the thread could decide to disable the srq # 1: is to start the task # is trigger step. For dmm1 do trigger, or :init: if trigger is immediate # Also setup of producing signal to finish measurment (like *OPC or for dmm1 fetch?) and prevent # other level 0: commands # 2: Check if data has been read # 3: get cache # trigger/flags can be per instrument (visa) or device(acq card) #Enable basic async for any device (like sr830) by allowing a delay before performing mesurement #Allow to chain one device on completion of another one. 
class asyncThread(threading.Thread): def __init__(self, operations, lock_instrument, lock_extra, init_ops, detect=None, delay=0., trig=None, cleanup=None): super(asyncThread, self).__init__() self.daemon = True self._stop = False self._async_delay = delay self._async_trig = trig self._async_detect = detect self._async_cleanup = cleanup self._operations = operations self._lock_instrument = lock_instrument self._lock_extra = lock_extra self._init_ops = init_ops # a list of (func, args, kwargs) self.results = [] self._replace_index = 0 def add_init_op(self, func, *args, **kwargs): self._init_ops.append((func, args, kwargs)) def change_delay(self, new_delay): self._async_delay = new_delay def change_trig(self, new_trig): self._async_trig = new_trig def change_detect(self, new_detect): self._async_detect = new_detect def change_cleanup(self, new_cleanup): self._async_cleanup = new_cleanup def replace_result(self, val, index=None): if index is None: index = self._replace_index self._replace_index += 1 self.results[index] = val @locked_calling def run(self): #t0 = time.time() for f, args, kwargs in self._init_ops: f(*args, **kwargs) delay = self._async_delay if delay and not CHECKING(): func = lambda: self._stop _retry_wait(func, timeout=delay, delay=0.1) if self._stop: return try: if self._async_trig and not CHECKING(): self._async_trig() #print 'Thread ready to detect ', time.time()-t0 if self._async_detect is not None: while not self._async_detect(): if self._stop: break if self._stop: return finally: if self._async_cleanup and not CHECKING(): self._async_cleanup() #print 'Thread ready to read ', time.time()-t0 for func, kwarg in self._operations: self.results.append(func(**kwarg)) #print 'Thread finished in ', time.time()-t0 def cancel(self): self._stop = True def wait(self, timeout=None): # we use a the context manager because join uses sleep. with _sleep_signal_context_manager(): self.join(timeout) return not self.is_alive() # For proper KeyboardInterrupt handling, the docheck function should # be internally protected with _sleep_signal_context_manager # This is the case for FastEvent and any function using sleep instead of time.sleep def wait_on_event(task_or_event_or_func, check_state = None, max_time=None, progress_base='Event wait', progress_timed=True): # task_or_event_or_func either needs to have a wait attribute with a parameter of # seconds. Or it should be a function accepting a parameter of time in s. # check_state allows to break the loop if check_state._error_state # becomes True # It returns True/False unless it is stopped with check_state in which case it returns None # Note that Event.wait (actually threading.Condition.wait) # tries to wait for 1ms then for 2ms more then 4, 8, 16, 32 and then in blocks # of 50 ms. If the wait would be longer than what is left, the wait is just # what is left. However, on windows 7 (at least), the wait ends up being # rounded to: 1, 2, 4 and 8->10ms, 16->20ms, 32-> 40ms # therefore, using Event.wait can produce times of 10, 20, 30, 40, 60, 100, 150 # 200 ms ... # Can use FastEvent.wait instead of Event.wait to be faster # progress_base is prefix when using status line (for timeout >= 1s) # when set to None, status line update is disabled. # progress_timed is mainStatusLine.new timed option. 
start_time = time.time() try: # should work for task (threading.Thread) and event (threading.Event) docheck = task_or_event_or_func.wait except AttributeError: # just consider it a function docheck = task_or_event_or_func dummy = (max_time is not None and max_time < 1.) or (progress_base is None) with mainStatusLine.new(priority=100, timed=progress_timed, dummy=dummy) as progress: if progress_base is not None: progress_base += ' %.1f' if max_time is not None: progress_base = progress_base + '/{:.1f}'.format(max_time) while True: if max_time is not None: check_time = max_time - (time.time()-start_time) check_time = max(0., check_time) # make sure it is positive check_time = min(check_time, 0.2) # and smaller than 0.2 s else: check_time = 0.2 if docheck(check_time): return True duration = time.time()-start_time if max_time is not None and duration > max_time: return False if progress_base is not None: progress(progress_base%duration) if check_state is not None and check_state._error_state: break # processEvents is for the current Thread. # if a thread does not have and event loop, this does nothing (not an error) processEvents_managed(max_time_ms = 20) def _general_check(val, min=None, max=None, choices=None, lims=None, msg_src=None): # self is use for perror if lims is not None: if isinstance(lims, tuple): min, max = lims else: choices = lims mintest = maxtest = choicetest = True if min is not None: mintest = val >= min if max is not None: maxtest = val <= max if choices: choicetest = val in choices state = mintest and maxtest and choicetest if state == False: if not mintest: err='{val!s} is below MIN=%r'%min if not maxtest: err='{val!s} is above MAX=%r'%max if not choicetest: err='invalid value({val!s}): use one of {choices!s}' if msg_src is None: err = 'Failed check: '+err else: err = 'Failed check for %s: '%msg_src + err d = dict(val=val, choices=repr(choices)) raise ValueError(err.format(**d), d) ####################################################### ## Base device ####################################################### class BaseDevice(object): """ ---------------- General device documentation All devices provide a get method. Some device also implement set, check methods. Users should not call the get/set methods directly bus instead should use the pyHegel set/get functions. Both get and set use a cache variable which is accessible with getcache, setcache methods The gets have no positional parameters. The sets and check have one positional parameter, which is the value. They can have multiple keyword parameters """ def __init__(self, autoinit=True, doc='', setget=False, allow_kw_as_dict=False, allow_missing_dict=False, allow_val_as_first_dict=False, get_has_check=False, min=None, max=None, choices=None, multi=False, graph=True, trig=False, redir_async=None): # instr and name updated by instrument's _create_devs # doc is inserted before the above doc # autoinit can be False, True or a number. # The number affects the default implementation of force_get: # Bigger numbers are initialized first. 0 is not initialized, True is 1 # setget makes us get the value after setting it # this is usefull for instruments that could change the value # under us. # allow_kw_as_dict allows the conversion of kw to a dict. There needs to be # a choices.field_names list of values (like with ChoiceMultiple) # allow_missing_dict, will fill the missing elements of dict with values # from a get # allow_val_as_first_dict when True, takes val as first element of dictionary. 
# probably only useful if allow_missing_dict is also True # get_has_check, make it true if the _getdev produces the Runtime_Get_Para_Checked # exception (calls _get_para_checked). This is needed for proper CHECKING mode # or if executing the get has not side effect. self.instr = None self.name = 'foo' # Use thread local data to keep the last_filename and a version of cache self._local_data = threading.local() self._cache = None self._set_delayed_cache = None self._check_cache = {} self._autoinit = autoinit self._setdev_p = None self._getdev_p = None self._setget = setget self._trig = trig self._redir_async = redir_async self._last_filename = None self.min = min self.max = max self.choices = choices self._allow_kw_as_dict = allow_kw_as_dict self._allow_missing_dict = allow_missing_dict self._allow_val_as_first_dict = allow_val_as_first_dict self._get_has_check = get_has_check self._doc = doc # obj is used by _get_conf_header and _write_dev self._format = dict(file=False, multi=multi, xaxis=None, graph=graph, append=False, header=None, bin=False, extra_conf=None, options={}, obj=self) def _delayed_init(self): """ This function is called by instrument's _create_devs once initialization is complete """ pass @property def _last_filename(self): try: return self._local_data.last_filename except AttributeError: return None @_last_filename.setter def _last_filename(self, filename): self._local_data.last_filename = filename def __getattribute__(self, name): # we override __doc__ so for instances we return the result from _get_docstring # But when asking for __doc__ on the class we get the original docstring # Note that __doc__ is automatically set for every class (defaults to None) # and it does not refer to its parent __doc__. # Also __doc__ is not writable. To make it writable, it needs to be # overwritten in a metaclass (cls.__doc__=cls.__doc__ is enough) # Another option is to set __doc__ = property(_get_docstring) in all # classes (or use a metaclass to do that automatically) but then # asking for __doc__ on the class does not return a string but a property object. if name == '__doc__': return self._get_docstring() return super(BaseDevice, self).__getattribute__(name) def _get_docstring(self, added=''): doc_base = BaseDevice.__doc__ if doc_base is None: doc_base = '' doc = self._doc extra = '' if self.choices: extra = '\n-------------\n Possible value to set: %s\n'%repr(self.choices) elif self.min is not None and self.max is not None: extra = '\n-------------\n Value between %r and %r\n'%(self.min, self.max) elif self.min is not None: extra = '\n-------------\n Value at least %r\n'%(self.min) elif self.max is not None: extra = '\n-------------\n Value at most %r\n'%(self.max) return doc + added + extra + doc_base # for cache consistency # get should return the same thing set uses @locked_calling_dev def set(self, *val, **kwarg): if not CHECKING(): # So when checking, self.check will be seen as in a check instead # of a set. 
self._check_cache['in_set'] = True self.check(*val, **kwarg) if self._check_cache: val = self._check_cache['val'] kwarg = self._check_cache['kwarg'] set_kwarg = self._check_cache['set_kwarg'] else: val = val[0] set_kwarg = kwarg if not CHECKING(): self._set_delayed_cache = None # used in logical devices self._setdev(val, **set_kwarg) if self._setget: val = self.get(**kwarg) elif self._set_delayed_cache is not None: val = self._set_delayed_cache # only change cache after succesfull _setdev self.setcache(val) def _get_para_checked(self, *val): get_para_checked(*val) @locked_calling_dev def get(self, **kwarg): if self._getdev_p is None: raise NotImplementedError, self.perror('This device does not handle _getdev') if not CHECKING() or self._get_has_check: self._last_filename = None format = self.getformat(**kwarg) kwarg.pop('graph', None) #now remove graph from parameters (was needed by getformat) kwarg.pop('bin', None) #same for bin kwarg.pop('extra_conf', None) to_finish = False if kwarg.get('filename', False) and not format['file']: #we did not ask for a filename but got one. #since _getdev probably does not understand filename #we handle it here filename = kwarg.pop('filename') to_finish = True try: ret = self._getdev(**kwarg) except Runtime_Get_Para_Checked as e: if len(e.args) == 1: ret = e.args[0] elif len(e.args) > 1: ret = e.args else: ret = self.getcache() if to_finish: _write_dev(ret, filename, format=format) if format['bin']: ret = None else: ret = self.getcache() self.setcache(ret) return ret #@locked_calling_dev def getcache(self, local=False): """ With local=True, returns thread local _cache. If it does not exist yet, returns None. Use this for the data from a last fetch if another thread is also doing fetches. (For example between after a get to make sure getcache obtains the result from the current thread (unless they are protected with a lock)) With local=False (default), returns the main _cache which is shared between threads (but not process). When the value is None and autoinit is set, it will return the result of get. Use this if another thread might be changing the cached value and you want the last one. However if another thread is changing values, or the user changed the values on the instrument maually (using the front panel), than you better do get instead of getcache to really get the up to date value. """ if local: try: return self._local_data.cache except AttributeError: return None # local is False with self.instr._lock_instrument: # only local data, so don't need _lock_extra if self._cache is None and self._autoinit and not CHECKING(): # This can fail, but getcache should not care for #InvalidAutoArgument exceptions try: return self.get() except InvalidAutoArgument: self._cache = None return self._cache def _do_redir_async(self): obj = self # go through all redirections while obj._redir_async: obj = obj._redir_async return obj def getasync(self, async, **kwarg): obj = self._do_redir_async() if async != 3 or self == obj: ret = obj.instr._get_async(async, obj, trig=obj._trig, **kwarg) # now make sure obj._cache and self._cache are the same else: # async == 3 and self != obj: # async thread is finished, so lock should be available with self.instr._lock_instrument: # only local data, so don't need _lock_extra #_get_async blocks if it is not in the correct thread and is not #complete. Here we just keep the lock until setcache is complete # so setcache does not have to wait for a lock. 
ret = obj.instr._get_async(async, obj, **kwarg) self.setcache(ret) self._last_filename = obj._last_filename if async == 3: # update the obj local thread cache data. obj._local_data.cache = ret return ret #@locked_calling_dev def setcache(self, val, nolock=False): if nolock == True: self._cache = val else: with self.instr._lock_instrument: # only local data, so don't need _lock_extra self._cache = val self._local_data.cache = val # thread local, requires no lock def __call__(self, val=None): raise SyntaxError, """Do NOT call a device directly, like instr.dev(). Instead use set/get on the device or functions that use set/get like sweep or record.""" def __repr__(self): gn, cn, p = self.instr._info() return '<device "%s" of %s=(class "%s" at 0x%08x)>'%(self.name, gn, cn, p) def __set__(self, instance, val): #print instance self.set(val) def perror(self, error_str='', **dic): dic.update(name=self.name, instr=self.instr, gname=self.instr.find_global_name()) return ('{gname}.{name}: '+error_str).format(**dic) # Implement these in a derived class def _setdev(self, val, **kwarg): raise NotImplementedError, self.perror('This device does not handle _setdev') def _getdev(self, **kwarg): raise NotImplementedError, self.perror('This device does not handle _getdev') def _general_check(self, val, min=None, max=None, choices=None, lims=None, msg_src=None, str_return=False): # This wraps the _general_check function to wrap the error message with perror # with str_return, it either returns a error string or None instead of producting an exception try: _general_check(val, min, max, choices, lims, msg_src) except (ValueError, KeyError) as e: new_message = self.perror(e.args[0]) # new_message = self.perror(e.args[0],**e.args[1]) if str_return: return new_message raise e.__class__(new_message) def _pre_check(self, *val, **kwarg): # This cleans up *val and **kwarg to handle _allow_kw_as_dict # It returns a single val and a cleaned up kwarg. # This will also always create a new _check_cache with at least the keys # fnct_set, val, kwarg, fnct_str, set_kwarg # in_set should be removed (so check after a set should work) # kwarg should contain all the keyword (except for the _allow_kw_as_dict) # that are needed for get # set_kwarg are the kwarg passed to setdev # Note that the returned kwarg is a copy so you can pop values out of it # without modifying _check_cache['kwarg'] in_set = self._check_cache.get('in_set', False) fnct_str = 'set' if in_set else 'check' self._check_cache = {'fnct_set': in_set, 'fnct_str': fnct_str} if self._setdev_p is None: raise NotImplementedError, self.perror('This device does not handle %s'%fnct_str) nval = len(val) if nval == 1: val = val[0] elif nval == 0: val = None else: raise RuntimeError(self.perror('%s can only have one positional parameter'%fnct_str)) allow_var_kw = False if nval and self._allow_val_as_first_dict and not isinstance(val, dict): val = {self.choices.field_names[0]:val} allow_var_kw = True if self._allow_kw_as_dict: if val is None or allow_var_kw: if val is None: val = dict() for k in kwarg.keys(): if k in self.choices.field_names: val[k] = kwarg.pop(k) elif nval == 0: # this permits to set a value to None raise RuntimeError(self.perror('%s requires a value.'%fnct_str)) self._check_cache['val'] = val self._check_cache['kwarg'] = kwarg self._check_cache['set_kwarg'] = kwarg.copy() return val, kwarg.copy() def _set_missing_dict_helper(self, val, _allow=None, **kwarg): """ This will replace missing values if necessary. 
_allow can be None (which uses self._allow_missing_dict) or it can be False, True (which uses get) or 'cache' which uses the cache Actually using False is an error it returns the possibly update val """ if _allow is None: _allow = self._allow_missing_dict if _allow == 'cache': old_val = self.getcache() elif _allow is True: old_val = self.get(**kwarg) else: raise ValueError(self.perror('Called _set_missing_dict_helper with _allow=False')) old_val.update(val) return old_val def _checkdev(self, val): # This default _checkdev handles a general check with _allow_missing_dict # but no extra kwarg. The caller should have tested and removed them try: self._general_check(val, self.min, self.max, self.choices) except KeyError_Choices: # need to catch the exception instead of always filling all the variables # some device might accept partial entries # they could override _set_missing_dict_helper to only add some entries. if not self._allow_missing_dict: raise kwarg = self._check_cache['kwarg'] val = self._set_missing_dict_helper(val, **kwarg) self._check_cache['val'] = val self._general_check(val, self.min, self.max, self.choices) @locked_calling_dev def check(self, *val, **kwarg): # This raises an exception if set does not work (_setdev_p is None) val, kwarg = self._pre_check(*val, **kwarg) self._checkdev(val, **kwarg) def getformat(self, filename=None, **kwarg): # we need to absorb any filename argument # This function should not communicate with the instrument. # first handle options we don't want saved in 'options' graph = kwarg.pop('graph', None) extra_conf = kwarg.pop('extra_conf', None) self._format['options'] = kwarg #now handle the other overides bin = kwarg.pop('bin', None) xaxis = kwarg.pop('xaxis', None) # we need to return a copy so changes to dict here and above does not # affect the devices dict permanently format = self._format.copy() if graph is not None: format['graph'] = graph if bin is not None: format['file'] = False format['bin'] = bin if xaxis is not None and format['xaxis'] is not None: format['xaxis'] = xaxis format['extra_conf'] = extra_conf return format def getfullname(self): return self.instr.header.getcache()+'.'+self.name def force_get(self): """ Force a reread of the instrument attached to this device. This should be called before saving headers. """ self.instr.force_get() class wrapDevice(BaseDevice): def __init__(self, setdev=None, getdev=None, checkdev=None, getformat=None, **extrak): # auto insert documentation if setdev or getdev has one. if not extrak.has_key('doc'): if setdev is not None and setdev.__doc__: extrak['doc'] = setdev.__doc__ elif getdev is not None and getdev.__doc__: extrak['doc'] = getdev.__doc__ BaseDevice.__init__(self, **extrak) # the methods are unbounded methods. self._setdev_p = setdev self._getdev_p = getdev self._checkdev_p = checkdev self._getformat = getformat def _setdev(self, val, **kwarg): self._setdev_p(val, **kwarg) def _getdev(self, **kwarg): return self._getdev_p(**kwarg) def _checkdev(self, val, **kwarg): if self._checkdev_p is not None: self._checkdev_p(val, **kwarg) else: super(wrapDevice, self)._checkdev(val, **kwarg) def getformat(self, **kwarg): if self._getformat is not None: return self._getformat(**kwarg) else: return super(wrapDevice, self).getformat(**kwarg) class cls_wrapDevice(BaseDevice): def __init__(self, setdev=None, getdev=None, checkdev=None, getformat=None, **extrak): # auto insert documentation if setdev or getdev has one. 
if not extrak.has_key('doc'): if setdev is not None and setdev.__doc__: extrak['doc'] = setdev.__doc__ elif getdev is not None and getdev.__doc__: extrak['doc'] = getdev.__doc__ BaseDevice.__init__(self, **extrak) # the methods are unbounded methods. self._setdev_p = setdev self._getdev_p = getdev self._checkdev_p = checkdev self._getformat = getformat def _setdev(self, val, **kwarg): self._setdev_p(self.instr, val, **kwarg) def _getdev(self, **kwarg): return self._getdev_p(self.instr, **kwarg) def _checkdev(self, val, **kwarg): if self._checkdev_p is not None: self._checkdev_p(self.instr, val, **kwarg) else: super(cls_wrapDevice, self)._checkdev(val, **kwarg) def getformat(self, **kwarg): if self._getformat is not None: return self._getformat(self.instr, **kwarg) else: return super(cls_wrapDevice, self).getformat(**kwarg) def _find_global_name(obj): dic = _globaldict try: return [k for k,v in dic.iteritems() if v is obj and k[0]!='_'][0] except IndexError: return "name_not_found" # Using this metaclass, the class method # _add_class_devs will be executed at class creation. # Hence added devices will be part of the class and will # allow the inst.dev=2 syntax # (Since for the device __set__ to work requires the # object to be part of the class, not the instance) class MetaClassInit(type): def __init__(cls, name, bases, dct): cls._add_class_devs() type.__init__(cls, name, bases, dct) #TODO: maybe override classmethod, automatically call _add_class_devs for all devices... ####################################################### ## Base Instrument ####################################################### # Async behavior changed 2015-06-03 # Before, the device would select either trig or delay # trig would use triggering, delay would use async_delay # If multiple device used both, they would both be turned on # and run_and_wait would only ever use trig, never async_delay # That was never really used and did not provide flexibility # like for devices that can sometimes need one or the other # or making run_and_wait behave like async for delay # Now, to improve the situation, I removed the option of # delay for devices. Device can only say they need triggerring # or not. They also use it when then need a delay. # async_delay is always respected for every and all devices, # and for both async and run_and_wait. It is used before the trig # For the wait option in a trig, we use async_wait device. # Finally the selection of whether to use a trigger or # a delay is left to _async_trig and _async_detect. # They both use information from _async_mode which should be # set by _async_select which is called in the async thread (init_list) # and by ReadvalDev class BaseInstrument(object): __metaclass__ = MetaClassInit alias = None # add _quiet_delete here in case we call __del__ before __init__ because of problem in subclass _quiet_delete = False def __init__(self, quiet_delete=False): self._quiet_delete = quiet_delete self.header_val = None self._lock_instrument = Lock_Instruments() if not hasattr(self, '_lock_extra'): # don't overwrite what is assigned in subclasses self._lock_extra = Lock_Extra() self._async_mode = 'wait' self._create_devs() self._async_local_data = threading.local() self._async_wait_check = True # The _async_statusLinecan be used in _async_detect to update the user # on the progress. 
self._async_statusLine = mainStatusLine.new(timed=True) self._last_force = time.time() self._conf_helper_cache = None # this is filled by conf_helper (should be under a locked state to prevent troubles) self.init(full=True) def __del__(self): if not self._quiet_delete: print 'Destroying '+repr(self) def _async_select(self, devs): """ It receives a list of devices to help decide how to wait. The list entries can be in the form (dev, option_dict) or just dev """ pass def _async_detect(self, max_time=.5): # subclasses should only call this if they need async_wait data = self._get_async_local_data() cur = time.time() left = data.async_wait - (cur - data.async_wait_start) if left <= 0.: return True if left <= max_time: sleep(left) return True sleep(max_time) return False @locked_calling def _async_trig(self): # subclasses can always call this self._async_statusLine.restart_time() data = self._get_async_local_data() if self._async_mode.startswith('wait'): self._async_wait_check_helper() data = self._get_async_local_data() data.async_wait_start = time.time() data.async_wait = self.async_wait.getcache() def _async_cleanup_after(self): # subclasses overides should call this. Called unconditionnaly after async/run_and_wait self._async_statusLine('') def _async_wait_check_helper(self): if self._async_wait_check and self.async_wait.getcache() == 0.: print self.perror('***** WARNING You should give a value for async_wait *****') self._async_wait_check = False @locked_calling def wait_after_trig(self): """ waits until the triggered event is finished """ try: ret = wait_on_event(self._async_detect) finally: self._async_cleanup_after() return ret # Always make sure that asyncThread run behaves in the same way @locked_calling def run_and_wait(self): """ This initiate a trigger and waits for it to finish. """ sleep(self.async_delay.getcache()) try: self._async_trig() self.wait_after_trig() finally: # in case we were stopped because of KeyboardInterrupt or something else. self._async_cleanup_after() def _get_async_local_data(self): d = self._async_local_data try: d.async_level except AttributeError: d.async_list = [] d.async_select_list = [] d.async_list_init = [] d.async_level = -1 d.async_counter = 0 d.async_task = None d.async_wait_start = 0. d.async_wait = 0. return d def _under_async_setup(self, task): self._async_running_task = task def _under_async(self): try: return self._async_running_task.is_alive() except AttributeError: return False def _get_async(self, async, obj, trig=False, **kwarg): # get_async should note change anything about the instrument until # we run the asyncThread. Should only change local thread data. # we are not protected by a lock until that. data = self._get_async_local_data() if async == -1: # we reset task if data.async_level > 1: data.async_task.cancel() data.async_level = -1 if async != 3 and not (async == 2 and data.async_level == -1) and ( async < data.async_level or async > data.async_level + 1): if data.async_level > 1: data.async_task.cancel() data.async_level = -1 raise ValueError, 'Async in the wrong order. Reseting order. Try again..' 
if async == 0: # setup async task if data.async_level == -1: # first time through data.async_list = [] data.async_select_list = [] data.async_list_init = [(self._async_select, (data.async_select_list, ), {})] delay = self.async_delay.getcache() data.async_task = asyncThread(data.async_list, self._lock_instrument, self._lock_extra, data.async_list_init, delay=delay) data.async_list_init.append((self._under_async_setup, (data.async_task,), {})) data.async_level = 0 if trig: data.async_task.change_detect(self._async_detect) data.async_task.change_trig(self._async_trig) data.async_task.change_cleanup(self._async_cleanup_after) data.async_list.append((obj.get, kwarg)) data.async_list.append((lambda: obj._last_filename, {})) data.async_select_list.append((obj, kwarg)) elif async == 1: # Start async task (only once) #print 'async', async, 'self', self, 'time', time.time() if data.async_level == 0: # First time through data.async_task.start() data.async_level = 1 elif async == 2: # Wait for task to finish #print 'async', async, 'self', self, 'time', time.time() if data.async_level == 1: # First time through (no need to wait for subsequent calls) wait_on_event(data.async_task) data.async_level = -1 data.async_counter = 0 elif async == 3: # get values #print 'async', async, 'self', self, 'time', time.time() #return obj.getcache() ret = data.async_task.results[data.async_counter] # Need to copy the _last_filename item because it is thread local self._last_filename = data.async_task.results[data.async_counter+1] data.async_counter += 2 if data.async_counter == len(data.async_task.results): # delete task so that instrument can be deleted del data.async_task del data.async_list del data.async_select_list del data.async_list_init del self._async_running_task return ret def find_global_name(self): return _find_global_name(self) @classmethod def _cls_devwrap(cls, name): # Only use this if the class will be using only one instance # Otherwise multiple instances will collide (reuse same wrapper) setdev = getdev = checkdev = getformat = None for s in dir(cls): if s == '_'+name+'_setdev': setdev = getattr(cls, s) if s == '_'+name+'_getdev': getdev = getattr(cls, s) if s == '_'+name+'_checkdev': checkdev = getattr(cls, s) if s == '_'+name+'_getformat': getformat = getattr(cls, s) wd = cls_wrapDevice(setdev, getdev, checkdev, getformat) setattr(cls, name, wd) def _getdev_para_checked(self, *val): """ This function should be called in a _getdev (devwrap with get_has_check option enabled) after the parameters have been checked for validity. When in CHECKING only mode, this will skip the rest of the function. you should call this with one parameter (passed to exception) or no parameters When a parameter is given, it will be used as the get value (and cached) """ get_para_checked(*val) def _devwrap(self, name, **extrak): setdev = getdev = checkdev = getformat = None cls = type(self) for s in dir(self): if s == '_'+name+'_setdev': setdev = getattr(cls, s) if s == '_'+name+'_getdev': getdev = getattr(cls, s) if s == '_'+name+'_checkdev': checkdev = getattr(cls, s) if s == '_'+name+'_getformat': getformat = getattr(cls, s) wd = cls_wrapDevice(setdev, getdev, checkdev, getformat, **extrak) setattr(self, name, wd) def devs_iter(self): for devname in dir(self): obj = getattr(self, devname) if devname != 'alias' and isinstance(obj, BaseDevice): yield devname, obj def _create_devs_helper(self, once=False): """ Users can call this function after creating new device for an instrument that already exists. 
        It will properly initialize the new devices.
        The user might call it with once=True.
        """
        # if instrument had a _current_config function and the device does
        # not specify anything for header in its format string then
        # we assign it.
        #
        # need the ProxyMethod to prevent binding which blocks __del__
        if hasattr(self, '_current_config'):
            conf = ProxyMethod(self._current_config)
        else:
            conf = None
        for devname, obj in self.devs_iter():
            if once and obj.instr is not None:
                continue
            obj.instr = weakref.proxy(self)
            obj.name = devname
            if conf and not obj._format['header']:
                obj._format['header'] = conf
        for devname, obj in self.devs_iter():
            # some devices depend on others. So finish all initialization before delayed_init
            obj._delayed_init()
    def _create_devs(self):
        # devices need to be created here (not at class level)
        # because we want each instrument instance to use its own
        # device instance (otherwise they would share the instance data)
        self.async_delay = MemoryDevice(0., doc=
            "In seconds. This is the delay before the trigger in async and run_and_wait.")
        self.async_wait = MemoryDevice(0., doc=
            "In seconds. This is the wait time after a trig for devices that don't use a real trig/detect sequence.")
        self._async_base_dev = MemoryDevice(0, doc="internal dummy device used for triggering", trig=True, autoinit=False, choices=[0])
        self.run_and_wait_dev = ReadvalDev(self._async_base_dev, doc="This is a dummy device to be used when requiring a trigger (run_and_wait) from the instrument.")
        self._devwrap('header')
        self._create_devs_helper()
    # def _current_config(self, dev_obj, get_options):
    #     pass
    def _conf_helper(self, *devnames, **kwarg):
        """
        The positional arguments are either device name strings or a dictionary.
        When given a dictionary, it will be shown as options.
        no_default: when True, skips adding some default entries (like idn).
                    It can only be a kwarg.
                    If not given, it behaves as True unless one of the arguments
                    is a dictionary, in which case it behaves as False.
                    So for the default use of _conf_helper, where exactly one of the
                    calls includes the options dictionary (and there is always one),
                    there is no need to specify this value. The default behavior
                    is correct.
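        Illustrative example (the 'freq' and 'ampl' device names are hypothetical,
        not part of this module); an instrument's _current_config would typically
        end with something like:
            return self._conf_helper('freq', 'ampl', options)
        Since one of the arguments is a dictionary, no_default behaves as False,
        so the class_name and idn entries are added automatically.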
""" ret = [] no_default = kwarg.pop('no_default', None) if len(kwarg): raise InvalidArgument('Invalid keyword arguments %s'%kwarg) if no_default is None: no_default = True for devname in devnames[::-1]: # start from the end if isinstance(devname, dict): no_default = False # by default we will append add_to = lambda base, x: base.append(x) if isinstance(devnames[-1], dict): # unless last item is a dict then we insert before it add_to = lambda base, x: base.insert(-1, x) if not no_default: async_delay = self.async_delay.getcache() if async_delay != 0: devnames = list(devnames) # need to convert from tuple to a mutable list add_to(devnames, 'async_delay') for devname in devnames: if isinstance(devname, dict): val = repr(devname) devname = 'options' else: try: val = _repr_or_string(getattr(self, devname).getcache()) except AttributeError: val = _repr_or_string(getattr(self, devname)()) ret.append('%s=%s'%(devname, val)) if not no_default: add_to(ret, 'class_name="%s"'%self.__class__.__name__) add_to(ret, 'idn="%s"'%self.idn()) self._conf_helper_cache = no_default, add_to return ret def read(self): raise NotImplementedError, self.perror('This instrument class does not implement read') def write(self, val): raise NotImplementedError, self.perror('This instrument class does not implement write') def ask(self, question): raise NotImplementedError, self.perror('This instrument class does not implement ask') def ask_write(self, command): """ Automatically selects between ask or write depending on the presence of a ? """ if '?' in command: return self.ask(command) else: self.write(command) def init(self, full=False): """ Do instrument initialization (full=True)/reset (full=False) here """ # Your function should try and not interfere with another thread/process # already using the instrument (if it is allowed). So it should only set things # to values that should not change afterwards, or reset things that are protected # with locks pass # This allows instr.get() ... to be redirected to instr.alias.get() def __getattr__(self, name): if name in ['get', 'set', 'check', 'getcache', 'setcache', 'instr', 'name', 'getformat', 'getasync', 'getfullname']: if self.alias is None: raise AttributeError, self.perror('This instrument does not have an alias for {nm}', nm=name) return getattr(self.alias, name) else: raise AttributeError, self.perror('{nm} is not an attribute of this instrument', nm=name) def __call__(self): if self.alias is None: raise TypeError, self.perror('This instrument does not have an alias for call') return self.alias() @locked_calling def force_get(self): """ Rereads all devices that have autoinit=True This should be called when a user might have manualy changed some settings on an instrument. It is limited to once per 2 second. """ if time.time()-self._last_force < 2: # less than 2s since last force, skip it return l = [] for s, obj in self.devs_iter(): if obj._autoinit: l.append( (float(obj._autoinit), obj) ) l.sort(reverse=True) for flag,obj in l: try: obj.get() except InvalidAutoArgument: pass self._last_force = time.time() @locked_calling def iprint(self, force=False): poptions = np.get_printoptions() if force: self.force_get() ret = '' np.set_printoptions(threshold=50) for s, obj in self.devs_iter(): if obj is self._async_base_dev: continue if self.alias == obj: ret += 'alias = ' val = obj.getcache() ret += s+" = "+repr(val)+"\n" np.set_printoptions(**poptions) return ret def idn(self): """ This method should return a string that uniquely identify the instrument. 
For scpi it is often: <company name>,<model number>,<serial number>,<firmware revision> """ return "Undefined identification,X,0,0" def idn_split(self): idn = self.idn() parts = idn.split(',', 4) # There could be , in serial firmware revision # I also use lstrip because some device put a space after the comma. return dict(vendor=parts[0], model=parts[1].lstrip(), serial=parts[2].lstrip(), firmware=parts[3].lstrip()) def _info(self): return self.find_global_name(), self.__class__.__name__, id(self) def __repr__(self): gn, cn, p = self._info() return '%s = <"%s" instrument at 0x%08x>'%(gn, cn, p) def perror(self, error_str='', **dic): dic.update(instr=self, gname=self.find_global_name()) return ('{gname}: '+error_str).format(**dic) def _header_getdev(self): if self.header_val is None: return self.find_global_name() else: return self.header_val def _header_setdev(self, val): self.header_val = val @classmethod def _add_class_devs(cls): pass def trigger(self): pass def lock_force_release(self): self._lock_instrument.force_release() self._lock_extra.force_release() def lock_is_owned(self): return self._lock_instrument.is_owned() or self._lock_extra.is_owned() def _lock_acquire(self): self._lock_instrument.acquire() self._lock_extra.acquire() def _lock_release(self): self._lock_instrument.release() self._lock_extra.release() ####################################################### ## Memory device ####################################################### class MemoryDevice(BaseDevice): def __init__(self, initval=None, **kwarg): """ Provides _tostr and _fromstr using the choices functions if choices are given. Otherwise it uses the type of initval. autoinit and setget are disabled internally (they are useless for a Memory device.) """ kwarg['autoinit'] = False kwarg['setget'] = False kwarg['get_has_check'] = True BaseDevice.__init__(self, **kwarg) self.setcache(initval, nolock=True) self._setdev_p = True # needed to enable BaseDevice set in checking mode and also the check function self._getdev_p = True # needed to enable BaseDevice get in Checking mode if self.choices is not None and isinstance(self.choices, ChoiceBase): self.type = self.choices else: self.type = type(initval) def _getdev(self): self._get_para_checked() # This is not necessary, since in CHECKING we will read the cache anyway # but place it here as an example and to test the code. 
return self.getcache() def _setdev(self, val): self.setcache(val) def _tostr(self, val): # This function converts from val to a str for the command t = self.type return _tostr_helper(val, t) def _fromstr(self, valstr): # This function converts from the query result to a value t = self.type return _fromstr_helper(valstr, t) def _tostr_helper(val, t): # This function converts from val to a str for the command if t == bool: # True= 1 or ON, False= 0 or OFF return str(int(bool(val))) if t == float or t == int: # use repr instead of str to keep full precision return repr(val) if t == complex: return '%r,%r'%(val.real, val.imag) if t is None or (type(t) == type and issubclass(t, basestring)): return val return t.tostr(val) def _fromstr_helper(valstr, t): # This function converts from the query result to a value if t == bool: # it is '0' or '1' return bool(int(valstr)) #if t == bool: # it is '0' or '1' or ON or OFF #try: # return bool(int(valstr)) #except ValueError: # if valstr.upper() == 'ON': # return True # elif valstr.upper() == 'OFF': # return False # else: # raise if t == float or t == int: return t(valstr) if t == complex: vals = valstr.split(',') vals = map(float, vals) return complex(*vals) if t is None or (type(t) == type and issubclass(t, basestring)): return valstr return t(valstr) def _get_dev_min_max(instr, ask_str, str_type=float, ask='both'): """ ask_str is the question string. ask can be both, min or max. It always returns a tuple (min, max). If the value was not obtained it will be None See also dev._get_dev_min_max, instr._get_dev_min_max """ if ask not in ['both', 'min', 'max']: raise ValueError('Invalid ask in _get_dev_min_max') min = max = None if ask in ['both', 'min']: min = _fromstr_helper(instr.ask(ask_str+' min'), str_type) if ask in ['both', 'max']: max = _fromstr_helper(instr.ask(ask_str+' max'), str_type) return min, max ####################################################### ## SCPI device ####################################################### class scpiDevice(BaseDevice): _autoset_val_str = ' {val}' def __init__(self,setstr=None, getstr=None, raw=False, chunk_size=None, autoinit=True, autoget=True, get_cached_init=None, str_type=None, choices=None, doc='', auto_min_max=False, options={}, options_lim={}, options_apply=[], options_conv={}, extra_check_func=None, extra_set_func=None, extra_set_after_func=None, ask_write_opt={}, **kwarg): """ str_type can be float, int, None If choices is a subclass of ChoiceBase, then str_Type will be set to that object if unset. If only getstr is not given and autoget is true a getstr is created by appending '?' to setstr. If autoget is false and there is no getstr, autoinit is set to False. When autoget is false, if get_cached_init is not None, then the cache is used instead of get and is initialized to the value of get_cached_init. You probably should initialize it during the instrument init. auto_min_max can be False, True, 'min' or 'max'. True will do both 'min' and 'max'. It will call the getstr with min, max to obtain the limits. raw when True will use read_raw instead of the default raw (in get) chunk_size is the option for ask. options is a list of optional parameters for get and set. It is a dictionnary, where the keys are the option name and the values are the default value for each option. If the value is a device. Then by default the cache of the device is used. 
An option like 'ch' can be used in the setstr/getstr parameter as {ch} (see string.format) For the setstr string you can use {val} to specify the position of the value, otherwise ' {val}' is automatically appended. Note that if specify {val} in the setstr, autoget is disabled. options_lim is dict of the range of values: It can be -None (the default) which means no limit -a tuple of (min, max) either one can be None to be unset -a list of choices (the object needs to handle __contains__) options_conv is a dict of functions to convert the value to a useful format. the functions receives 2 parameters (val, _tostr(val)) options_apply is a list of options that need to be set. In that order when defined. By default, autoinit=True is transformed to 10 (higher priority) unless options contains another device, then it is set to 1. extra_check_func, extra_set_func, extra_set_after_func are all functions called before or after (when in the name) the internal implementation proceeds. They allow modification of the default behavior (useful for more complicated range check). There signatures are: extra_check_func(val, dev_obj) can return "__SKIP__NEXT__" to jump the internal implementation extra_set_func(val, dev_obj, **kwargs) can return "__SKIP__NEXT__" to jump the internal implementation extra_set_after_func(val, dev_obj, **kwargs) ask_write_options are options passed to the ask and write methods """ if setstr is None and getstr is None: raise ValueError, 'At least one of setstr or getstr needs to be specified' if auto_min_max not in [False, True, 'min', 'max']: raise ValueError('Invalid auto_min_max values') self._auto_min_max = auto_min_max if setstr is not None and getstr is None and autoget == False: # we don't have get, so we remove autoinit to prevent problems with cache and force_get (iprint) autoinit = False if isinstance(choices, ChoiceBase) and str_type is None: str_type = choices if autoinit == True: autoinit = 10 test = [ True for k,v in options.iteritems() if isinstance(v, BaseDevice)] if len(test): autoinit = 1 BaseDevice.__init__(self, doc=doc, autoinit=autoinit, choices=choices, get_has_check=True, **kwarg) self._setdev_p = setstr if setstr is not None: fmtr = string.Formatter() val_present = False for txt, name, spec, conv in fmtr.parse(setstr): if name == 'val': val_present = True autoget = False if not val_present: self._setdev_p = setstr + self._autoset_val_str self._getdev_cache = False if getstr is None: if autoget: getstr = setstr+'?' elif get_cached_init is not None: self.setcache(get_cached_init, nolock=True) self._getdev_cache = True getstr = True self._getdev_p = getstr self._options = options self._options_lim = options_lim self._options_apply = options_apply self._options_conv = options_conv self._ask_write_opt = ask_write_opt self._extra_check_func = extra_check_func self._extra_set_func = extra_set_func self._extra_set_after_func = extra_set_after_func self.type = str_type self._raw = raw self._chunk_size = chunk_size self._option_cache = {} def _delayed_init(self): """ This is called after self.instr is set """ auto_min_max = self._auto_min_max if auto_min_max in ['min', 'max']: self._auto_set_min_max(auto_min_max) elif auto_min_max: self._auto_set_min_max() super(scpiDevice, self)._delayed_init() def _auto_set_min_max(self, ask='both'): mnmx = self._get_dev_min_max(ask) self._set_dev_min_max(*mnmx) @locked_calling_dev def _get_dev_min_max(self, ask='both'): """ ask can be both, min or max. It always returns a tuple (min, max). 
If the value was not obtained it will be None. See also instr._get_dev_min_max """ options = self._combine_options() command = self._getdev_p command = command.format(**options) return _get_dev_min_max(self.instr, command, self.type, ask) def _set_dev_min_max(self, min=None, max=None): if min is not None: self.min = min if max is not None: self.max = max def _get_docstring(self, added=''): # we don't include options starting with _ if len(self._options) > 0: added += '---------- Optional Parameters\n' for optname, optval in self._options.iteritems(): basedev = False if isinstance(optval, BaseDevice): basedev = True if optname[0] != '_': added += '{optname}: has default value {optval!r}\n'.format(optname=optname, optval=optval) lim = self._options_lim.get(optname, None) if lim is not None: if basedev: added += ' current choices (above device): ' else: added += ' current choices: ' if isinstance(lim, tuple): if lim[0] is None and lim[1] is None: added += 'any value allowed' else: if lim[0] is not None: added += '%r <= '%lim[0] added += '%s'%optname if lim[1] is not None: added += ' <= %r'%lim[1] else: added += repr(lim) added += '\n' return super(scpiDevice, self)._get_docstring(added=added) def _tostr(self, val): # This function converts from val to a str for the command t = self.type return _tostr_helper(val, t) def _fromstr(self, valstr): # This function converts from the query result to a value t = self.type return _fromstr_helper(valstr, t) def _get_option_values(self, extradict={}): opt = self._options.copy() d = {k:v.getcache() for k, v in opt.iteritems() if isinstance(v, BaseDevice)} opt.update(d) opt.update(extradict) return opt @locked_calling_dev def getcache(self, local=False): if local: return super(scpiDevice, self).getcache(local=True) #we need to check if we still are using the same options curr_cache = self._get_option_values() if self._option_cache != curr_cache: self.setcache(None) return super(scpiDevice, self).getcache() def _check_option(self, option, val): """ Checks the option with value val If it is not an option, raise an KeyError If it is not within min/max or choices for this option, returns an error string If everything is fine, return None """ if option not in self._options.keys(): raise KeyError, self.perror('This device does not handle option "%s".'%option) lim = self._options_lim.get(option) # if no limits were given but this is a device, use the limits from the device. # TODO use dev.check (trap error) if lim is None and isinstance(self._options[option], BaseDevice): dev = self._options[option] lim = (dev.min, dev.max) if dev.choices is not None: lim = dev.choices return self._general_check(val, lims=lim, msg_src='Option "%s"'%option, str_return=True) def _combine_options(self, **kwarg): # get values from devices when needed. # The list of correct values could be a subset so push them to kwarg # for testing. 
# clean up kwarg by removing all None values kwarg = { k:v for k, v in kwarg.iteritems() if v is not None} for k, v in kwarg.iteritems(): ck = self._check_option(k, v) if ck is not None: # in case of error, raise it raise InvalidArgument(ck) # Some device need to keep track of current value so we set them # if changed for k in self._options_apply: if k in kwarg.keys(): v = kwarg[k] opt_dev = self._options[k] if opt_dev.getcache() != v: opt_dev.set(v) # Now get default values and check them if necessary options = self._get_option_values(kwarg) for k,v in options.iteritems(): if k not in kwarg: ck = self._check_option(k, v) if ck is not None: # There was an error, returned value not currently valid # so return it instead of dictionnary raise InvalidAutoArgument(ck) # everything checks out so use those kwarg options.update(kwarg) self._option_cache = options.copy() for k in options.iterkeys(): val = options[k] option_dev = self._options[k] option_lim = self._options_lim.get(k, None) if isinstance(option_dev, BaseDevice): try: tostr_val = option_dev._tostr(val) except AttributeError: # Some devices like BaseDevice, cls_WrapDevice don't have _tostr tostr_val = repr(val) #elif isinstance(option_lim, ChoiceBase): elif option_lim is not None: try: tostr_val = option_lim.tostr(val) except AttributeError: tostr_val = repr(val) else: tostr_val = repr(val) try: conv = self._options_conv[k] except KeyError: options[k] = tostr_val else: options[k] = conv(val, tostr_val) return options def _setdev(self, val): # We only reach here if self._setdev_p is not None if self._extra_set_func: if self._extra_set_func(val, self) == "__SKIP__NEXT__": return val = self._tostr(val) options = self._check_cache['options'] command = self._setdev_p command = command.format(val=val, **options) self.instr.write(command, **self._ask_write_opt) if self._extra_set_after_func: self._extra_set_after_func(val, self) def _getdev(self, **kwarg): if self._getdev_cache: if kwarg == {}: return self.getcache() else: raise SyntaxError, self.perror('This device does not handle _getdev with optional arguments') try: options = self._combine_options(**kwarg) except InvalidAutoArgument: self.setcache(None) raise command = self._getdev_p command = command.format(**options) self._get_para_checked() ret = self.instr.ask(command, raw=self._raw, chunk_size=self._chunk_size, **self._ask_write_opt) return self._fromstr(ret) def _checkdev(self, val, **kwarg): options = self._combine_options(**kwarg) # all kwarg have been tested self._check_cache['set_kwarg'] = {} self._check_cache['options'] = options if self._extra_check_func: if self._extra_check_func(val, self) == "__SKIP__NEXT__": return super(scpiDevice, self)._checkdev(val) ####################################################### ## Readval device ####################################################### class ReadvalDev(BaseDevice): def _get_docstring(self, added=''): if not self._do_local_doc: return super(ReadvalDev, self)._get_docstring(added=added) else: basename = self._slave_dev.name subdoc = self._slave_dev.__doc__ doc = """ This device behaves like doing a run_and_wait followed by a {0}. When in async mode, it will trigger the device and then do the {0}. It has the same parameters as the {0} device, so look for its documentation. 
It is appended here for convenience: --- {0} doc {1} """.format(basename, subdoc) return doc def __init__(self, dev, autoinit=None, doc=None, **kwarg): if doc is None: self._do_local_doc = True else: self._do_local_doc = False self._slave_dev = dev if autoinit is None: autoinit = dev._autoinit super(ReadvalDev,self).__init__(redir_async=dev, autoinit=autoinit, doc=doc, get_has_check=True, **kwarg) self._getdev_p = True def _getdev(self, **kwarg): self.instr._async_select([(self._slave_dev, kwarg)]) self.instr.run_and_wait() ret = self._slave_dev.get(**kwarg) self._last_filename = self._slave_dev._last_filename return ret def getformat(self, **kwarg): d = self._slave_dev.getformat(**kwarg) d['obj'] = self return d def _decode_block_header(s): """ Takes a string with the scpi block header #niiiiivvvvvvvvvv where n gives then number of i and i gives the number of bytes v It returns slice, nbytes, nheaders i.e. a slice on the str to return the data a value for the number of bytes and a value for the length of the header If the strings does not start with a block format returns a full slice (:), nbytes=-1, 0 """ if len(s)==0 or s[0] != '#': return slice(None), -1, 0 nh = int(s[1]) if nh: # a value of 0 is possible nbytes = int(s[2:2+nh]) else: nbytes = -1 # nh=0 is used for unknown length or lengths that require more than 9 digits. return slice(2+nh, None), nbytes, 2+nh def _decode_block_base(s, skip=None): sl, nb, nh = _decode_block_header(s) block = s[sl] lb = len(block) if nb != -1: if lb < nb : raise IndexError, 'Missing data for decoding. Got %i, expected %i'%(lb, nb) elif lb > nb : if lb-nb == 1 and (s[-1] in '\r\n'): return block[:-1] elif lb-nb == 2 and s[-2:] == '\r\n': return block[:-2] raise IndexError, 'Extra data in for decoding. Got %i ("%s ..."), expected %i'%(lb, block[nb:nb+10], nb) elif skip: if isinstance(skip, basestring): if block.endswith(skip): return block[:-len(skip)] else: raise RuntimeError('Data is not terminated by requested skip string.') else: return block[:-skip] return block def _encode_block_base(s): """ This inserts the scpi block header before the string start. see _decode_block_header for the description of the header """ N = len(s) N_as_string = str(N) N_as_string_len = len(N_as_string) if N_as_string_len >= 10: # starting at 1G header = '#0' else: header = '#%i'%N_as_string_len + N_as_string return header+s def _decode_block(s, t='<f8', sep=None, skip=None): """ sep can be None for binaray encoding or ',' for ascii csv encoding type can be np.float64 float32 int8 int16 int32 uint8 uint16 ... or it can be entered as a string like 'float64' skip can be used when the data length is unknown (#0....) then you can enter the termination string you want removed from the end, or an integer of the number of character to remove from the end. """ block = _decode_block_base(s, skip=skip) if sep is None or len(block) == 0: return np.fromstring(block, t) return np.fromstring(block, t, sep=sep) def _encode_block(v, sep=None): """ Encodes the iterable v (array, list ..., or just a string) into either a scpi binary block (including header) when sep=None (default) or into a sep separated string. 
Often sep is ',' for scpi """ if sep is not None: return ','.join(map(repr, v)) if isinstance(v, basestring): s = v else: s = np.asarray(v).tostring() return _encode_block_base(s) def _decode_block_auto(s, t='<f8', skip=None): if len(s) and s[0] == '#': sep = None else: sep = ',' return _decode_block(s, t, sep=sep, skip=skip) class Block_Codec(object): def __init__(self, dtype='<f8', sep=None, skip=None, single_not_array=False, empty=None): self._dtype = dtype self._sep = sep self._skip = skip self._single_not_array = single_not_array self._empty = empty def __call__(self, input_str): ret = _decode_block(input_str, self._dtype, self._sep, self._skip) empty = self._empty if empty is not None and len(ret) == 0: ret = np.append(ret, empty) if len(ret) == 1: ret = ret[0] return ret def tostr(self, array): dtype = self._dtype if isinstance(array, (int, long, float)): array = np.array([array], dtype=dtype) elif isinstance(array, (list, tuple)): array = np.array(array, dtype=dtype) if array.dtype != self._dtype: array = array.astype(self._dtype) return _encode_block(array, self._sep) class Block_Codec_Raw(object): def __init__(self, dtype='<f8', sep=None): self._dtype = dtype def __call__(self, input_str): return np.fromstring(input_str, self._dtype) def tostr(self, array): return array.tostring() decode_float64 = functools.partial(_decode_block_auto, t='<f8') # np.float64, little-endian decode_float32 = functools.partial(_decode_block_auto, t='<f4') # np.float32, little-endian decode_uint32 = functools.partial(_decode_block_auto, t='<u4') # np.uint32, little-endian decode_uint8_bin = functools.partial(_decode_block, t=np.uint8) decode_uint16_bin = functools.partial(_decode_block, t='<u2') # np.uint16, little-endian decode_int32 = functools.partial(_decode_block_auto, t='<i4') # np.int32, little-endian decode_int8 = functools.partial(_decode_block_auto, t=np.int8) decode_int16 = functools.partial(_decode_block_auto, t='<i2') # np.int16, little-endian decode_complex128 = functools.partial(_decode_block_auto, t='<c16') # np.complex128, little-endian def decode_float64_2col(s): v = _decode_block_auto(s, t='<f8') v.shape = (-1,2) return v.T def decode_float64_avg(s): return _decode_block_auto(s, t='<f8').mean() def decode_float64_std(s): return _decode_block_auto(s, t='<f8').std(ddof=1) def decode_float64_meanstd(s): data = _decode_block_auto(s, t='<f8') return data.std(ddof=1)/np.sqrt(len(data)) class scaled_float(object): def __init__(self, scale): """ scale is used as: python_val = instrument_val * scale instrument_val = python_val / scale """ self.scale = scale def __call__(self, input_str): return _fromstr_helper(input_str, float)*self.scale def tostr(self, val): return _tostr_helper(val/self.scale, float) class quoted_string(object): def __init__(self, quote_char='"', quiet=False, tostr=True, fromstr=True): """ tostr, fromstr: are True to enable the quote adding/removal. 
They can also be a string to use a different quote_char """ self._quote_char = quote_char self._quiet = quiet self._fromstr = fromstr self._tostr = tostr def __call__(self, quoted_str): if not self._fromstr: return quoted_str if isinstance(self._fromstr, basestring): quote_char = self._fromstr else: quote_char = self._quote_char if len(quoted_str) and quote_char == quoted_str[0] and quote_char == quoted_str[-1]: return quoted_str[1:-1] else: if not self._quiet: print 'Warning, string <%s> does not start and end with <%s>'%(quoted_str, quote_char) return quoted_str def tostr(self, unquoted_str): if not self._tostr: return unquoted_str if isinstance(self._tostr, basestring): quote_char = self._tostr else: quote_char = self._quote_char if quote_char in unquoted_str: raise ValueError, 'The given string already contains a quote :%s:'%quote_char return quote_char+unquoted_str+quote_char class quoted_list(quoted_string): def __init__(self, sep=',', element_type=None, protect_sep=None, **kwarg): super(quoted_list,self).__init__(**kwarg) self._sep = sep # element_type can be a list of types. If it is not long enough for the input # it is repeated. self._element_type = element_type self._protect_sep = protect_sep def calc_element_type(self, input_list_len): elem_type = self._element_type if elem_type is not None: N = input_list_len if isinstance(elem_type, list): Nt = len(elem_type) if N%Nt != 0: raise RuntimeError('Unexpected number of elements') elem_type = elem_type*(N//Nt) else: elem_type = [elem_type]*N return elem_type def __call__(self, quoted_l, skip_type=False): unquoted = super(quoted_list,self).__call__(quoted_l) if self._protect_sep is not None: start_sym, end_sym = self._protect_sep lst = [] s = 0 i = 0 while i<len(unquoted): # skip a start_sym to end_sym region c = unquoted[i] if c in start_sym: ind = start_sym.find(c) i = unquoted.find(end_sym[ind],i+1) if i == -1: i = len(unquoted) break elif c in self._sep: lst.append(unquoted[s:i]) i += 1 s = i else: i += 1 lst.append(unquoted[s:]) else: lst = unquoted.split(self._sep) if self._element_type is not None and not skip_type: elem_type = self.calc_element_type(len(lst)) lst = [_fromstr_helper(elem, et) for elem, et in zip(lst, elem_type)] return lst def tostr(self, unquoted_l, skip_type=False): if self._element_type is not None and not skip_type: elem_type = self.calc_element_type(len(unquoted_l)) unquoted_l = [_tostr_helper(elem, et) for elem,et in zip(unquoted_l, elem_type)] unquoted = self._sep.join(unquoted_l) return super(quoted_list,self).tostr(unquoted) class quoted_dict(quoted_list): def __init__(self, empty='NO CATALOG', **kwarg): super(quoted_dict,self).__init__(**kwarg) self._empty = empty def __call__(self, quoted_l): l = super(quoted_dict,self).__call__(quoted_l, skip_type=True) if l == [self._empty]: return OrderedDict() l = super(quoted_dict,self).__call__(quoted_l) return OrderedDict(zip(l[0::2], l[1::2])) def tostr(self, d): skip_type = False if d == {}: l = [self._empty] skip_type = True else: l = [] for k,v in d.iteritems(): l.extend([k ,v]) return super(quoted_dict,self).tostr(l, skip_type=skip_type) # NOTE: a choice function tostr and __call__ (fromstr) # is used when not specifying the str_type to scpi_device # and when it is used as an option device for scpi_device (to obtain # the string replacement for the command/question) # Therefore even if you override the functions (by defining str_type) # they could still be used if they are within options. 
# Therefore it is recommended to make them work all the time # (this might require passing in a type during __init__) # See ChoiceDevDep for example class ChoiceBase(object): def __call__(self, input_str): raise NotImplementedError, 'ChoiceBase subclass should overwrite __call__' def tostr(self, val): raise NotImplementedError, 'ChoiceBase subclass should overwrite __tostr__' def __repr__(self): raise NotImplementedError, 'ChoiceBase subclass should overwrite __repr__' def __contains__(self, val): raise NotImplementedError, 'ChoiceBase subclass should overwrite __contains__' class ChoiceLimits(ChoiceBase): """ This ChoiceBase implements a min/max check. """ def __init__(self, min=None, max=None, str_type=None): self.min = min self.max = max self.str_type = str_type def __call__(self, input_str): return _fromstr_helper(input_str, self.str_type) def tostr(self, val): return _tostr_helper(val, self.str_type) def __contains__(self, val): try: _general_check(val, min=self.min, max=self.max) except ValueError: return False else: return True def __repr__(self): if self.min is None and self.max is None: return 'Limits: any val' elif self.min is None: return 'Limits: val <= %s'%self.max elif self.max is None: return 'Limits: %s <= val'%self.min else: return 'Limits: %s <= val <= %s'%(self.min, self.max) class ChoiceStrings(ChoiceBase): """ Initialize the class with a list of strings s=ChoiceStrings('Aa', 'Bb', ..) then 'A' in s or 'aa' in s will return True irrespective of capitalization. if no_short=True option is given, then only the long names are allowed The elements need to have the following format: ABCdef where: ABC is known as the short name and abcdef is known has the long name. When using in or searching with index method both long and short names are looked for normalizelong and normalizeshort return the above (short is upper, long is lower) Long and short name can be the same. redirects option is a dictionnary of input strings to some other element. It can be useful for device that list ON or 1 as possible values. use it like {'1': 'ON'} """ def __init__(self, *values, **kwarg): # use **kwarg because we can't have keyword arguments after *arg self.quotes = kwarg.pop('quotes', False) no_short = kwarg.pop('no_short', False) self.redirects = kwarg.pop('redirects', {}) if kwarg != {}: raise TypeError, 'ChoiceStrings only has quotes=False and no_short=False as keyword arguments' self.values = values self.long = [v.lower() for v in values] if no_short: self.short = self.long else: self.short = [v.translate(None, string.ascii_lowercase).lower() for v in values] # for short having '', use the long name instead # this happens for a string all in lower cap. 
self.short = [s if s!='' else l for s,l in zip(self.short, self.long)] def __contains__(self, x): # performs x in y; with y=Choice() xl = x.lower() inshort = xl in self.short inlong = xl in self.long return inshort or inlong def index(self, value): xl = value.lower() try: return self.short.index(xl) except ValueError: pass return self.long.index(xl) def normalizelong(self, x): return self.long[self.index(x)] def normalizeshort(self, x): return self.short[self.index(x)].upper() def __call__(self, input_str): # this is called by dev._fromstr to convert a string to the needed format input_str = self.redirects.get(input_str, input_str) if self.quotes: if input_str[0] != '"' or input_str[-1] != '"': raise ValueError, 'The value --%s-- is not quoted properly'%input_str return self.normalizelong(input_str[1:-1]) return self.normalizelong(input_str) def tostr(self, input_choice): # this is called by dev._tostr to convert a choice to the format needed by instrument if self.quotes: return '"%s"'%input_choice return input_choice # no need to change. Already a proper string. def __repr__(self): return repr(self.values) def __getitem__(self, index): # index can be a single value: return it # or it can be a slice or a list, return a new object with only the selected elements # the list can be numbers or strings (which finds number with index) if not isinstance(index, (slice, list)): return self.values[index] if isinstance(index, slice): return ChoiceStrings(*self.values[index], quotes=self.quotes) # we have a list values = [] for i in index: if isinstance(i, basestring): i = self.index(i) values.append(self.values[i]) return ChoiceStrings(*values, quotes=self.quotes) class ChoiceSimpleMap(ChoiceBase): """ Given a dictionnary where keys are what is used on the instrument, and the values are what are used on the python side. filter, when given, is a function applied to the input from the instrument. It can be used to normalize the input entries """ def __init__(self, input_dict, filter=None): self.dict = input_dict self.keys = input_dict.keys() self.values = input_dict.values() self.filter = filter if filter is not None: for x in self.keys: if filter(x) != x: raise ValueError, "The input dict has at least one key where filter(key)!=key." def __contains__(self, x): return x in self.values def __call__(self, input_key): if self.filter is not None: input_key = self.filter(input_key) return self.dict[input_key] def tostr(self, input_choice): return self.keys[self.values.index(input_choice)] def __repr__(self): return repr(self.values) Choice_bool_OnOff = ChoiceSimpleMap(dict(ON=True, OFF=False), filter=string.upper) Choice_bool_YesNo = ChoiceSimpleMap(dict(YES=True, NO=False), filter=string.upper) class ChoiceIndex(ChoiceBase): """ Initialize the class with a list of values or a dictionnary The instrument uses the index of a list or the key of the dictionnary which needs to be integers. If you want a dictionnary with keys that are strings see ChoiceSimpleMap. option normalize when true rounds up the float values for better comparison. Use it with a list created from a calculation. 
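    Illustrative example (the values are hypothetical):
        c = ChoiceIndex(['off', 'low', 'high'], offset=1)
        c('2')            # instrument answer '2'  -> 'low'
        c.tostr('high')   # pyHegel value 'high'   -> '3'
        'low' in c        # -> True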
""" def __init__(self, list_or_dict, offset=0, normalize=False): self._normalize = normalize self._list_or_dict = list_or_dict if isinstance(list_or_dict, np.ndarray): list_or_dict = list(list_or_dict) if isinstance(list_or_dict, list): if self._normalize: list_or_dict = [self.normalize_N(v) for v in list_or_dict] self.keys = range(offset,offset+len(list_or_dict)) # instrument values self.values = list_or_dict # pyHegel values self.dict = dict(zip(self.keys, self.values)) else: # list_or_dict is dict if self._normalize: list_or_dict = {k:self.normalize_N(v) for k,v in list_or_dict.iteritems()} self.dict = list_or_dict self.keys = list_or_dict.keys() self.values = list_or_dict.values() @staticmethod def normalize_N(v): """ This transforms 9.9999999999999991e-06 into 1e-05 so can compare the result of a calcualtion with the theoretical one v can only be a single value Anything with +-1e-25 becomes 0. """ if abs(v) < 1e-25: return 0. return float('%.13e'%v) def index(self, val): if self._normalize: val = self.normalize_N(val) return self.values.index(val) def __getitem__(self, key): # negative indices will not work return self.dict[key] def __call__(self, input_str): # this is called by dev._fromstr to convert a string to the needed format val = int(input_str) return self[val] def tostr(self, input_choice): # this is called by dev._tostr to convert a choice to the format needed by instrument i = self.index(input_choice) return str(self.keys[i]) def __contains__(self, x): if self._normalize: x = self.normalize_N(x) return x in self.values def __repr__(self): return repr(self.values) class ChoiceDevDep(ChoiceBase): """ This class selects options from a dictionnary of lists or instances of ChoiceBase, based on the value of dev (match to the dictionnary keys). The keys can be values or and object that handles 'in' testing. A default choice can be given with a key of None sub_type is used to provide the proper from/to str converters. Works the same as str_type from scpi_device. if sub_type is None, it calls the to/from str of the selected value of the dictionnary (which should be an instance of ChoiceBase). """ def __init__(self, dev, choices, sub_type=None): self.choices = choices self.dev = dev self.sub_type = sub_type def _get_choice(self): val = self.dev.getcache() for k, v in self.choices.iteritems(): if isinstance(k, (tuple, ChoiceBase)) and val in k: return v elif val == k: return v return self.choices.get(None, []) def __call__(self, input_str): if self.sub_type: return _fromstr_helper(input_str, self.sub_type) else: return self._get_choice()(input_str) def tostr(self, input_choice): if self.sub_type: return _tostr_helper(input_choice, self.sub_type) else: return self._get_choice().tostr(input_choice) def __contains__(self, x): return x in self._get_choice() def __repr__(self): return repr(self._get_choice()) class ChoiceDev(ChoiceBase): """ Get the choices from a device Wether device return a dict or a list, it should work the same For a dict you can use keys or values (when keys fail) Indexing with one of the allowed values returns the value for list or the key/value pair for dict. For a list also using an integer is allowed, and it picks the nth value. sub_type is used to provide the proper from/to str converters. Works the same as str_type from scpi_device. sub_type=None (default) is the same as sub_type=str (i.e. no conversion). The tostr converter uses the key of the dict. 
""" def __init__(self, dev, sub_type=None): self.dev = dev self.sub_type = sub_type def _get_choices(self): return self.dev.getcache() def __call__(self, input_str): return _fromstr_helper(input_str, self.sub_type) def tostr(self, input_choice): choices = self._get_choices() ch = self[input_choice] if isinstance(choices, dict): ch = ch[0] return _tostr_helper(ch, self.sub_type) def __contains__(self, x): choices = self._get_choices() if isinstance(choices, dict): if x in choices.keys(): return True choices = choices.values() return x in choices def __getitem__(self, key): choices = self._get_choices() if key not in self and isinstance(choices, list): # key might be an integer return choices[key] if key in self: if isinstance(choices, dict): if key not in choices.keys() and key in choices.values(): key = [k for k,v in choices.iteritems() if v == key][0] return key, choices[key] else: return key raise IndexError, 'Invalid index. choose among: %r'%choices def __repr__(self): return repr(self._get_choices()) class ChoiceDevSwitch(ChoiceDev): """ Same as ChoiceDev but the value for set/check can also be something other (a type different than in_base_type), in which case the other_conv function should convert it to the in_base_type. """ def __init__(self, dev, other_conv, sub_type=None, in_base_type=basestring): self.other_conv = other_conv self.in_base_type = in_base_type super(ChoiceDevSwitch, self).__init__(dev, sub_type=sub_type) def cleanup_entry(self, x): if not isinstance(x, self.in_base_type): x = self.other_conv(x) return x def __getitem__(self, input_choice): input_choice = self.cleanup_entry(input_choice) return super(ChoiceDevSwitch, self).__getitem__(input_choice) def __contains__(self, input_choice): input_choice = self.cleanup_entry(input_choice) return super(ChoiceDevSwitch, self).__contains__(input_choice) def make_choice_list(list_values, start_exponent, end_exponent): """ given list_values=[1,3] start_exponent =-6 stop_expoenent = -3 produces [1e-6, 3e-6, 1e-5, 3e-5, 1e-4, 3e-4, 1e-3, 3e-3] """ powers = np.logspace(start_exponent, end_exponent, end_exponent-start_exponent+1) return (powers[:,None] * np.array(list_values)).flatten() class ChoiceMultiple(ChoiceBase): def __init__(self, field_names, fmts=int, sep=',', ending_sep=False, ending_sep_get=None, allow_missing_keys=False, reading_sep=None): """ This handles scpi commands that return a list of options like 1,2,On,1.34 We convert it into a dictionary to name and acces the individual parameters. fmts can be a single converter or a list of converters the same length as field_names A converter is either a type or a (type, lims) tuple where lims can be a tuple (min, max) with either one being None or a list/object of choices. Not that if you use a ChoiceBase object, you only need to specify it as the type. It is automatically used as a choice also. If one element of a list can affect the choices for a subsequent one, see ChoiceMultipleDev ending_sep, when True it adds (on writing) or remove (on reading) an extra sep at the end of the string ending_sep_get, when not None it overrides ending_sep for reading. allow_missing_keys when True, will not produce error for missing field_names when checking readding_sep when not None is used instead of sep when obtaining data from instrument. 
""" self.field_names = field_names if not isinstance(fmts, (list, np.ndarray)): fmts = [fmts]*len(field_names) fmts_type = [] fmts_lims = [] for f in fmts: if not isinstance(f, tuple): if isinstance(f, ChoiceBase): f = (f,f) else: f = (f, None) fmts_type.append(f[0]) fmts_lims.append(f[1]) self.fmts_type = fmts_type self.fmts_lims = fmts_lims self.sep = sep self.ending_sep_set = ending_sep self.ending_sep_get = ending_sep if ending_sep_get is not None: self.ending_sep_get = ending_sep_get self.reading_sep = sep if reading_sep is None else reading_sep self.allow_missing_keys = allow_missing_keys def __call__(self, fromstr): sep = self.reading_sep if self.ending_sep_get: if fromstr.endswith(sep): fromstr = fromstr[:-len(sep)] else: raise ValueError('Expected ending sep in class %s'%self.__class__.__name__) v_base = fromstr.split(sep) if len(v_base) != len(self.field_names): raise ValueError('Invalid number of parameters in class %s'%self.__class__.__name__) v_conv = [] names = [] for k, val, fmt in zip(self.field_names, v_base, self.fmts_type): if isinstance(fmt, ChoiceMultipleDep): fmt.set_current_vals(dict(zip(names, v_conv))) v_conv.append(_fromstr_helper(val, fmt)) names.append(k) return dict_improved(zip(self.field_names, v_conv), _freeze=True) def tostr(self, fromdict=None, **kwarg): # we assume check (__contains__) was called so we don't need to # do fmt.set_current_vals again or check validity if dictionnary keys if fromdict is None: fromdict = kwarg ret = [] for k, fmt in zip(self.field_names, self.fmts_type): v = fromdict[k] ret.append(_tostr_helper(v, fmt)) ret = self.sep.join(ret) if self.ending_sep_set: ret += self.sep return ret def __contains__(self, x): # performs x in y; with y=Choice(). Used for check # Returns True if everything is fine. # Otherwise raise a ValueError, a KeyError or a KeyError_Choices (for missing values) xorig = x x = x.copy() # make sure we don't change incoming dict for k, fmt, lims in zip(self.field_names, self.fmts_type, self.fmts_lims): if isinstance(fmt, ChoiceMultipleDep): fmt.set_current_vals(xorig) try: val = x.pop(k) # generates KeyError if k not in x except KeyError: if self.allow_missing_keys: continue raise KeyError_Choices('key %s is missing'%k) _general_check(val, lims=lims, msg_src='key %s'%k) if x != {}: raise KeyError('The following keys in the dictionnary are incorrect: %r'%x.keys()) return True def __repr__(self): r = '' first = True for k, lims in zip(self.field_names, self.fmts_lims): if not first: r += '\n' first = False r += 'key %s has limits %r'%(k, lims) return r class ChoiceMultipleDep(ChoiceBase): """ This class selects options from a dictionnary of lists or instances of ChoiceBase, based on the value of key (match to the dictionnary keys). It is similar to ChoiceDevDep but selects on a ChoiceMultiple element instead of a device. It can only be used as a type for a ChoiceMultiple element. The dictionnary keys can be values or and object that handles 'in' testing. A default choice can be given with a key of None sub_type is used to provide the proper from/to str converters. Works the same as str_type from scpi_device. if sub_type is None, it calls the to/from str of the selected value of the dictionnary (which should be an instance of ChoiceBase). Note that the dependent option currently requires the key to come before. i.e. if the base is {'a':1, 'B':2} then 'B' can depend on 'a' but not the reverse (the problem is with ChoiceMultiple __contains__, __call__ and tostr). 
""" def __init__(self, key, choices, sub_type=None): self.choices = choices self.key = key self.all_vals = {key:None} self.sub_type = sub_type def set_current_vals(self, all_vals): self.all_vals = all_vals def _get_choice(self): val = self.all_vals[self.key] for k, v in self.choices.iteritems(): if isinstance(k, (tuple, ChoiceBase)) and val in k: return v elif val == k: return v return self.choices.get(None, []) def __call__(self, input_str): if self.sub_type: return _fromstr_helper(input_str, self.sub_type) else: return self._get_choice()(input_str) def tostr(self, input_choice): if self.sub_type: return _tostr_helper(input_choice, self.sub_type) else: return self._get_choice().tostr(input_choice) def __contains__(self, x): return x in self._get_choice() def __repr__(self): return repr(self.choices) class Dict_SubDevice(BaseDevice): """ Use this to gain access to a single/multiple element of a device returning a dictionary from ChoiceMultiple. """ def __init__(self, subdevice, key, force_default=False, **kwarg): """ This device and the subdevice need to be part of the same instrument (otherwise async will not work properly) Here we will only modify the value of key in dictionary. key can be a single value, or a list of values (in which case set/get will work on a list) force_default, set the default value of force used in check/set. It can be True, False or 'slave' which means to let the subdevice handle the insertion of the missing parameters """ self._subdevice = subdevice self._sub_key = key self._force_default = force_default subtype = self._subdevice.type self._single_key = False if not isinstance(key, list): key = [key] self._single_key = True multi = False else: multi = key self._sub_key = key lims = [] for k in key: if k not in subtype.field_names: raise IndexError, "The key '%s' is not present in the subdevice"%k lims.append( subtype.fmts_lims[subtype.field_names.index(k)] ) self._sub_lims = lims setget = subdevice._setget autoinit = subdevice._autoinit trig = subdevice._trig get_has_check = True super(Dict_SubDevice, self).__init__( setget=setget, autoinit=autoinit, trig=trig, multi=multi, get_has_check=get_has_check, **kwarg) self._setdev_p = subdevice._setdev_p # needed to enable BaseDevice set in checking mode and also the check function self._getdev_p = True # needed to enable BaseDevice get in Checking mode def _get_docstring(self, added=''): # we don't include options starting with _ if self._single_key: added = """ This device set/get the '%s' dictionnary element of a subdevice. It uses the same options as that subdevice (%s) """%(self._sub_key[0], self._subdevice) else: added = """ This device set/get the '%s' dictionnary elements of a subdevice. 
It uses the same options as that subdevice (%s) """%(self._sub_key, self._subdevice) return super(Dict_SubDevice, self)._get_docstring(added=added) def setcache(self, val, nolock=False): if nolock: # no handled because getcache can lock raise ValueError('Dict_SubDevice setcache does not handle nolock=True') vals = self._subdevice.getcache() if vals is not None: vals = vals.copy() if self._single_key: val = [val] if len(self._sub_key) != len(val): raise ValueError('This Dict_SubDevice requires %i elements'%len(self._sub_key)) for k, v in zip(self._sub_key, val): vals[k] = v self._subdevice.setcache(vals) def getcache(self, local=False): if local: vals = self._subdevice.getcache(local=True) else: vals = self._subdevice.getcache() if vals is None: ret = None else: ret = [vals[k] for k in self._sub_key] if self._single_key: ret = ret[0] # Lets set the _cache variable anyway but it should never # be used. _cache should always be accessed with getcache and this will # bypass the value we set here. super(Dict_SubDevice, self).setcache(ret) return ret def _force_helper(self, force): if force is None: force = self._force_default return force def _checkdev(self, val, force=None, **kwarg): if self._single_key: val = [val] self._check_cache['cooked_val'] = val if len(self._sub_key) != len(val): raise ValueError(self.perror('This Dict_SubDevice requires %i elements'%len(self._sub_key))) # Lets check the parameters individually, in order to help the user with # a more descriptive message. for i, limv in enumerate(zip(self._sub_lims, val)): lim, v = limv msg_src = None if not self._single_key: msg_src = 'element %i'%i self._general_check(v, lims=lim, msg_src=msg_src) force = self._force_helper(force) allow = {True:True, False:'cache', 'slave':False}[force] self._check_cache['allow'] = allow op = self._check_cache['fnct_str'] # otherwise, the check will be done by set in _setdev below if op == 'check': # we need to complete the test as much as possible vals = {k:v for k, v in zip(self._sub_key, val)} if allow: vals = self._subdevice._set_missing_dict_helper(vals, _allow=allow, **kwarg) self._subdevice.check(vals, **kwarg) def _getdev(self, **kwarg): vals = self._subdevice.get(**kwarg) if vals is None: # When checking and value not initialized ret = [0] * len(self._sub_key) else: ret = [vals[k] for k in self._sub_key] if self._single_key: ret = ret[0] return ret def _setdev(self, val, force=None, **kwarg): """ force when True, it make sure to obtain the subdevice value with get. when False, it uses getcache. The default is in self._force_default """ val = self._check_cache['cooked_val'] allow = self._check_cache['allow'] vals = {k:v for k, v in zip(self._sub_key, val)} if allow: vals = self._subdevice._set_missing_dict_helper(vals, _allow=allow, **kwarg) self._subdevice.set(vals, **kwarg) class Lock_Visa(object): """ This handles the locking of the visa session. Once locked, this prevents any other visa session (same process or not) to the same instrument from communicating with it. It is a reentrant lock (release the same number of times as acquire to fully unlock). 
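    Illustrative sketch (``vi`` stands for an already opened visa session; this is
    only a usage outline, not code taken from elsewhere in the package):

        lck = Lock_Visa(vi)
        with lck:            # __enter__ acquires the exclusive lock (waits if needed)
            vi.write('*CLS')
        # only if a stale lock was ever left behind by mistake:
        lck.force_release()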
""" def __init__(self, vi): self._vi = vi self._count = 0 def _visa_lock(self, timeout=0.001): """ It returns True if the lock was acquired before timeout, otherwise it returns False """ timeout = max(int(timeout/1e-3),1) # convert from seconds to milliseconds try: if not CHECKING(): self._vi.lock_excl(timeout) except visa_wrap.VisaIOError as exc: if exc.error_code == visa_wrap.constants.VI_ERROR_TMO: # This is for Agilent IO visa library return False elif exc.error_code == visa_wrap.constants.VI_ERROR_RSRC_LOCKED: # This is for National Instruments visa library return False else: raise else: # we have lock self._count += 1 return True def release(self): if not CHECKING(): self._vi.unlock() # could produce VI_ERROR_SESN_NLOCKED else: if self._count < 1: raise visa_wrap.VisaIOError(visa_wrap.constants.VI_ERROR_SESN_NLOCKED) self._count -= 1 def acquire(self): return wait_on_event(self._visa_lock) __enter__ = acquire def __exit__(self, exc_type, exc_value, exc_traceback): self.release() def is_owned(self): return self._count != 0 def force_release(self): n = 0 expect = self._count try: while True: self.release() n += 1 except visa_wrap.VisaIOError as exc: if exc.error_code != visa_wrap.constants.VI_ERROR_SESN_NLOCKED: raise if n: print 'Released Visa lock', n, 'time(s) (expected %i releases)'%expect else: print 'Visa lock was not held (expected %i releases)'%expect self._count = 0 ####################################################### ## VISA Instrument ####################################################### _SharedStructure_debug = False class _SharedStructure(object): """ This shares a single ctype object across multiple processes. Access it with the data attribute. If the data attribute has members, accessing it directly on this object will be forwared to the data object. Should only use this if the memory access are protected with locks (between process). Visa can do that (otherwise have a look at multiprocessing.synchronize._multiprocessing.SemLock) """ def __init__(self, somectype, tagname): import mmap self._tagname = tagname counter_type = ctypes.c_int32 counter_size = ctypes.sizeof(ctypes.c_int32) size = counter_size + ctypes.sizeof(somectype) if os.name != 'nt': # we assume posix like. 
on linux need python-posix_ipc package (fedora) import posix_ipc self._shared_obj = posix_ipc.SharedMemory(tagname, posix_ipc.O_CREAT, size=size) self.buffer = mmap.mmap(self._shared_obj.fd, size) self._shared_obj.close_fd() else: # for windows self.buffer = mmap.mmap(-1, size, tagname=tagname) self._counter = counter_type.from_buffer(self.buffer, 0) self.data = somectype.from_buffer(self.buffer, counter_size) self._add_count() if _SharedStructure_debug: print 'There are now %i users of %r'%(self._get_count(), tagname) def __getattr__(self, name): return getattr(self.data, name) def __setattr__(self, name, value): try: data = object.__getattribute__(self, 'data') if hasattr(data, name): setattr(self.data, name, value) return except AttributeError: pass object.__setattr__(self, name, value) def _get_count(self): return self._counter.value def _add_count(self): self._counter.value += 1 def _dec_count(self): self._counter.value -= 1 def __del__(self): self._dec_count() count = self._get_count() if _SharedStructure_debug: print 'Cleaned up mmap, counter now %i'%self._get_count() self.buffer.close() if count == 0 and os.name != 'nt': self._shared_obj.unlink() class _LastTime(ctypes.Structure): _fields_ = [('write_time', ctypes.c_double), ('read_time', ctypes.c_double)] def resource_info(visa_addr): if isinstance(visa_addr, int): visa_addr = _normalize_gpib(visa_addr) return rsrc_mngr.resource_info(visa_addr) class Keep_Alive(threading.Thread): def __init__(self, interval, keep_alive_func): # the function keep_alive_func should call update_time somewhere. super(Keep_Alive, self).__init__() self.keep_alive_func = ProxyMethod(keep_alive_func) self.interval = interval self.lck = threading.RLock() self.update_time() self.stop = False self.daemon = True # This will allow python to exit def run(self): while True: with self.lck: if self.stop: break delta = time.time() - self.last if delta >= self.interval: self.keep_alive_func() continue # skipt wait (we just changed self.last) wait = min(self.interval - delta, 5) # wait at most 5s time.sleep(wait) def cancel(self): with self.lck: self.stop = True def update_time(self, no_lock=False): with self.lck: self.last = time.time() #def __del__(self): # print 'cleaning up keep_alive thread.' class visaInstrument(BaseInstrument): """ Open visa instrument with a visa address. If the address is an integer, it is taken as the gpib address of the instrument on the first gpib bus. Otherwise use a visa string like: 'GPIB0::12::INSTR' 'GPIB::12' 'USB0::0x0957::0x0118::MY49012345::0::INSTR' 'USB::0x0957::0x0118::MY49012345' """ def __init__(self, visa_addr, skip_id_test=False, quiet_delete=False, keep_alive=False, keep_alive_time=15*60, **kwarg): """ skip_id_test when True will skip doing the idn test. quiet_delete when True will prevent the print following an instrument delete keep_alive can True/False/'auto'. If auto, it is activated only when on a tcpip connection (hislip, socket, instr) keep_alive_time is the time in seconds between keep alive requests. 
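        Illustrative sketch (the addresses below are hypothetical):

            instr = visaInstrument('GPIB0::12::INSTR', skip_id_test=True)
            instr = visaInstrument('TCPIP0::192.168.1.5::hislip0::INSTR',
                                   keep_alive='auto', keep_alive_time=5*60)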
""" # need to initialize visa before calling BaseInstrument init # which might require access to device if isinstance(visa_addr, int): visa_addr = _normalize_gpib(visa_addr) self.visa_addr = visa_addr self._keep_alive_thread = None if not CHECKING(): self.visa = rsrc_mngr.open_resource(visa_addr, **kwarg) self._lock_extra = Lock_Visa(self.visa) #self.visa.timeout = 3 # in seconds # use 2.9 because I was getting 3.0 rounded to 10s timeouts on some visa lib configuration # 2.9 seemed to be rounded up to 3s instead self.set_timeout = 2.9 # in seconds to = time.time() self._last_rw_time = _LastTime(to, to) # When wait time are not 0, it will be replaced self._write_write_wait = 0. self._read_write_wait = 0. if (keep_alive == 'auto' and self.visa.is_tcpip) or keep_alive is True: # TODO handle keep_alive (get inspired by bluefors) # Could use the keep_alive setting for visa (at least socket/hislip) # However it is 2 hours by default on windows. Which is often too long. # self.visa.set_visa_attribute(visa_wrap.constants.VI_ATTR_TCPIP_KEEPALIVE, True) # Also note that writing an empty string (not even the newline) will not produce any tcpip # communication. So keepalive should send at least '\n' if that is valid. self._keep_alive_thread = Keep_Alive(keep_alive_time, self._keep_alive_func) BaseInstrument.__init__(self, quiet_delete=quiet_delete) if not CHECKING(): if not skip_id_test: idns = self.idn_split() if not instruments_registry.check_instr_id(self.__class__, idns['vendor'], idns['model'], idns['firmware']): print 'WARNING: this particular instrument idn is not attached to this class: operations might misbehave.' #print self.__class__, idns if self._keep_alive_thread: self._keep_alive_thread.start() def __del__(self): #print 'Destroying '+repr(self) # no need to call self.visa.close() # because self.visa does that when it is deleted if self._keep_alive_thread: self._keep_alive_thread.cancel() super(visaInstrument, self).__del__() # Do NOT enable locked_calling for read_status_byte, otherwise we get a hang # when instrument is on gpib using agilent visa. But do use lock visa # otherwise read_stb could fail because of lock held in another thread/process # The locked_calling problem is that the handler runs in a separate thread, # appart from the main locked thread (when using getasync) #@locked_calling def read_status_byte(self): # since on serial visa does the *stb? request for us # might as well be explicit and therefore handle the rw_wait properly # and do the locking. if CHECKING(): return 0 if self.visa.is_serial(): return int(self.ask('*stb?')) else: with self._lock_extra: return self.visa.read_stb() self._keep_alive_update() @locked_calling def control_remotelocal(self, remote=False, local_lockout=False, all=False): """ For all=True: remote=True: REN line is asserted -> when instruments are addressed they will go remote remote=False: REN line is deasserted -> All instruments go local and will NOT go remote when addressed This also clears lockout state For local_lockout=True: remote=True: All instruments on the bus go to local lockout state Also current instrument goes remote. remote=False: Same as all=True, remote=False followed by all=True, remote=True local lockout state means the local button is disabled on the instrument. The instrument can be switch for local to remote by gpib interface but cannot be switched from remote to local using the instrument local button. Not all instruments implement this lockout. Otherwise: remote=True: only this instrument goes into remote state. 
               remote=False: only this instrument goes into local state.
                             The instrument keeps its lockout state unchanged.
        """
        # False for both all and local_lockout (first part) should proceed in the same way.
        # Here I use a different instruction but I think they both do the same
        # i.e. VI_GPIB_REN_DEASSERT == VI_GPIB_REN_DEASSERT_GTL
        # possibly they might behave differently on some other bus (gpib, tcp?)
        # or for instruments that don't conform to proper 488.2 rules
        # For those reasons I keep the 2 different so it can be tested later.
        # Unused state:
        # VI_GPIB_REN_ASSERT_LLO : lockout only (no addressing)
        if CHECKING():
            return
        cnsts = visa_wrap.constants
        if all:
            if remote:
                val = cnsts.VI_GPIB_REN_ASSERT
            else:
                val = cnsts.VI_GPIB_REN_DEASSERT
        elif local_lockout:
            if remote:
                val = cnsts.VI_GPIB_REN_ASSERT_ADDRESS_LLO
            else:
                val = cnsts.VI_GPIB_REN_DEASSERT_GTL
                self.visa.control_ren(val)
                val = cnsts.VI_GPIB_REN_ASSERT
        else:
            if remote:
                val = cnsts.VI_GPIB_REN_ASSERT_ADDRESS
            else:
                val = cnsts.VI_GPIB_REN_ADDRESS_GTL
        self.visa.control_ren(val)
        self._keep_alive_update()
    def _keep_alive_func(self):
        self.write('') # should send just a newline.
    def _keep_alive_update(self):
        if self._keep_alive_thread:
            self._keep_alive_thread.update_time()
    def _do_wr_wait(self):
        if self._last_rw_time.read_time > self._last_rw_time.write_time:
            # last operation was a read
            last_time = self._last_rw_time.read_time
            wait_time = self._read_write_wait
        else:
            # last operation was a write
            last_time = self._last_rw_time.write_time
            wait_time = self._write_write_wait
        if wait_time == 0.:
            return
        if not isinstance(self._last_rw_time, _SharedStructure):
            # The timeout needs to work across processes, so we now share the last time values
            tagname = 'pyHegel-' + self.__class__.__name__ + '-' + hashlib.sha1(self.visa_addr).hexdigest()
            old = self._last_rw_time
            self._last_rw_time = _SharedStructure(_LastTime, tagname)
            self._last_rw_time.read_time = old.read_time
            self._last_rw_time.write_time = old.write_time
        cur_time = time.time()
        delta = (last_time+wait_time) - cur_time
        if delta > 0:
            sleep(delta)
    @locked_calling
    def read(self, raw=False, count=None, chunk_size=None):
        """ Reads data. The default is to read until an end is received, in chunk_size
            blocks (if chunk_size is not given, uses the default chunk_size).
            It then strips the termination characters unless raw is True.
            When a count is given, it does not wait for an end. It
            only reads exactly count characters. It never strips the
            termination characters.
        """
        if CHECKING():
            return ''
        if count:
            ret = self.visa.read_raw_n_all(count, chunk_size=chunk_size)
        elif raw:
            ret = self.visa.read_raw(size=chunk_size)
        else:
            ret = self.visa.read(chunk_size=chunk_size)
        self._last_rw_time.read_time = time.time()
        self._keep_alive_update()
        return ret
    @locked_calling
    def write(self, val, termination='default'):
        self._do_wr_wait()
        if not CHECKING():
            self.visa.write(val, termination=termination)
        else:
            if not isinstance(val, basestring):
                raise ValueError(self.perror('The write val is not a string.'))
        self._last_rw_time.write_time = time.time()
        self._keep_alive_update()
    @locked_calling
    def ask(self, question, raw=False, chunk_size=None):
        """
        Does write then read.
        With raw=True, replaces read with a read_raw.
        This is needed when dealing with binary data. The
        base read strips newlines from the end always.
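        Illustrative sketch ('CURVe?' is a hypothetical binary query):

            idn = instr.ask('*idn?')             # write then read, end stripped
            blk = instr.ask('CURVe?', raw=True)  # keep the raw bytes for binary transfers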
""" # we prevent CTRL-C from breaking between write and read using context manager with _delayed_signal_context_manager(): self.write(question) ret = self.read(raw=raw, chunk_size=chunk_size) return ret def idn(self): return self.ask('*idn?') def idn_usb(self): """ Returns the usb names attached to the vendor/product ids and the serial number The return is a tuple (vendor, product, serial) """ if CHECKING(): return ('vendor', 'product', 'serial') vendor = self.visa.get_visa_attribute(visa_wrap.constants.VI_ATTR_MANF_NAME) product = self.visa.get_visa_attribute(visa_wrap.constants.VI_ATTR_MODEL_NAME) serial = self.visa.get_visa_attribute(visa_wrap.constants.VI_ATTR_USB_SERIAL_NUM) return (vendor, product, serial) @locked_calling def factory_reset(self): """ This returns the instrument to a known state. Use CAREFULLY! """ self.write('*RST') self.force_get() @locked_calling def clear(self): """ This sends the *cls 488.2 command that should clear the status/event/ errors (but not change the enable registers.) It also cleans up any buffered status byte. """ self.write('*cls') #some device buffer status byte so clear them while self.read_status_byte()&0x40: pass @locked_calling def _dev_clear(self): """ This is the device clear instruction. For some devices it will clear the output buffers. (it should reset the interface state, but not change the state of status/event registers, errors states. See clear for that.) """ if CHECKING(): return self.visa.clear() self._keep_alive_update() @property def set_timeout(self): if CHECKING(): return None timeout_ms = self.visa.timeout if timeout_ms is None: return None else: return timeout_ms/1000. # return in seconds @set_timeout.setter def set_timeout(self, seconds): if seconds is None: val = None else: val = int(seconds*1000.) if CHECKING(): return self.visa.timeout = val def get_error(self): return self.ask('SYSTem:ERRor?') def _info(self): gn, cn, p = BaseInstrument._info(self) return gn, cn+'(%s)'%self.visa_addr, p @locked_calling def trigger(self): # This should produce the hardware GET on gpib # Another option would be to use the *TRG 488.2 command if CHECKING(): return self.visa.trigger() self._keep_alive_update() @locked_calling def _get_dev_min_max(self, ask_str, str_type=float, ask='both'): """ ask_str is the question string. ask can be both, min or max. It always returns a tuple (min, max). If the value was not obtained it will be None See also dev._get_dev_min_max """ return _get_dev_min_max(self, ask_str, str_type, ask) # To properly use self._conf_helper_cache, the caller (probably _current_config) should be locked. def _conf_helper(self, *devnames, **kwarg): ret = super(visaInstrument, self)._conf_helper(*devnames, **kwarg) no_default, add_to = self._conf_helper_cache if not no_default: add_to(ret, 'visa_addr="%s"'%self.visa_addr) return ret ####################################################### ## VISA Async Instrument ####################################################### # Note about async: # only one thread/process will have access to the device at a time # others are waiting for a lock # I only enable events (Queue or handlers) when I am about to use them # and disable them when I am done waiting. # wait_after_trig, run_and_wait and run in async should properly cleanup. # In case where the cleanup is not done properly, it would leave # some events/status in buffers and should be cleaned up on the # next run. # For agilent gpib, all device on bus will receive a handler/queue event. 
# I use the handler (only one should be enabled, If not then only one will have # the lock, the others will be waiting on read_status_byte: so only the important one # will actually reset the srq.) # For NI gpib, only the device that has SRQ on will receive the handler/queue event. # handlers are called within the gpib notify callback. All handlers # across all process are called. If one of the callback is slow, it only affects that process # thread. While in the callback, it does not add other events. # However queued events are only produced when waiting for the events, # they are not generated otherwise (for queued events, the driver does not setup # a notify callback). It is possible to loose events if the read_status # occurs between ibwait (which is every 1ms). However, again, the status read is protected # by the lock, and only one thread should be running anyway. # Note also that the auto serial poll is not jammed if the device holding the line SRQ is # not open. The driver will just keep autoprobing (during ibwait requests) and update the # device status so it can still find out if the device is requesting service. class visaInstrumentAsync(visaInstrument): def __init__(self, visa_addr, poll=False): # poll can be True (for always polling) 'not_gpib' for polling for lan and usb but # use the regular technique for gpib # or force_handler to always use the handler # the _async_sre_flag should match an entry somewhere (like in init) self._async_sre_flag = 0x20 #=32 which is standard event status byte (contains OPC) self._async_last_status = 0 self._async_last_status_time = 0 self._async_last_esr = 0 self._async_do_cleanup = False super(visaInstrumentAsync, self).__init__(visa_addr) self._async_mode = 'srq' if CHECKING(): is_gpib = False is_agilent = False self._async_polling = True self._RQS_status = -1 return is_gpib = self.visa.is_gpib() is_agilent = rsrc_mngr.is_agilent() self._async_polling = False if poll == True or (poll == 'not_gpib' and not is_gpib): self._async_polling = True self._RQS_status = -1 elif (is_gpib and is_agilent) or poll == 'force_handler': # Note that the agilent visa using a NI usb gpib adapter (at least) # disables the autopoll settings of NI # Hence a SRQ on the bus produces events for all devices on the bus. # If those events are not read, the buffer eventually fills up. # This is a problem when using more than one visaInstrumentAsync # To avoid that problem, I use a handler in that case. 
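            # Summary of the three completion-detection paths used by this class:
            #   polling  -> _async_detect_poll_func() re-reads the status byte until bit 0x40 is set
            #   handler  -> _RQS_handler() is called by visa on a SRQ and sets the _RQS_done event
            #   queue    -> _async_detect() waits on VI_EVENT_SERVICE_REQ with wait_on_event()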
self._RQS_status = 0 #-1: no handler, 0 not ready, other is status byte self._RQS_done = FastEvent() #starts in clear state self._proxy_handler = ProxyMethod(self._RQS_handler) # _handler_userval is the ctype object representing the user value (0 here) # It is needed for uninstall if not CHECKING(): self._handler_userval = self.visa.install_visa_handler(visa_wrap.constants.VI_EVENT_SERVICE_REQ, self._proxy_handler, 0) else: self._RQS_status = -1 if self.visa.is_usb() and not self.visa.resource_manager.is_agilent(): # For some weird reason, for National Instruments visalib on usb # the service request are queued by default until I enable/disable the service # just disabling does not work (says it is already disabled) # this with NI visa 14.0.0f0 self.visa.enable_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, visa_wrap.constants.VI_QUEUE) self.visa.disable_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, visa_wrap.constants.VI_QUEUE) def __del__(self): if self._RQS_status != -1: # Not absolutely necessary, but lets be nice self.visa.disable_event(visa_wrap.constants.VI_ALL_ENABLED_EVENTS, visa_wrap.constants.VI_ALL_MECH) # only necessary to keep handlers list in sync # the actual handler is removed when the visa is deleted (vi closed) self.visa.uninstall_visa_handler(visa_wrap.constants.VI_EVENT_SERVICE_REQ, self._proxy_handler, self._handler_userval) super(visaInstrumentAsync, self).__del__() def init(self, full=False): # This clears the error state, and status/event flags? self.clear() if full: self.write('*ese 1;*sre 32') # OPC flag def _RQS_handler(self, vi, event_type, context, userHandle): # For Agilent visalib (auto serial poll is off): # Reading the status will clear the service request of this instrument # if the SRQ line is still active, another call to the handler will occur # after a short delay (30 ms I think) everytime a read_status_byte is done # on the bus (and SRQ is still active). # For agilent visa, the SRQ status is queried every 30ms. So # you we might have to wait that time after the hardware signal is active # before this handler is called. # Because of locking, this only succeeds if we are owning the lock # (so we are the ones waiting for data or nobody is.) # Remember that we are called when any instrument on the gpib bus # requests service (not only for this instrument) status = self.read_status_byte() #if status&0x40 and status & self._async_sre_flag: #if status & self._async_sre_flag: if status&0x40: self._RQS_status = status self._async_last_status = status self._async_last_status_time = time.time() #sleep(0.01) # give some time for other handlers to run self._RQS_done.set() #print 'Got it', vi return visa_wrap.constants.VI_SUCCESS def _get_esr(self): if CHECKING(): return 0 return int(self.ask('*esr?')) def _async_detect_poll_func(self): if CHECKING(): status = 0x40 else: status = self.read_status_byte() if status & 0x40: self._async_last_status = status self._async_last_status_time = time.time() self._async_last_esr = self._get_esr() return True return False def _async_detect(self, max_time=.5): # 0.5 s max by default """ handles _async_mode of 'wait' (only wait delay), 'srq' (only detects srq) 'wait+srq' (wait followed by srq, so minimum of wait) all the options starting with wait will warn once if async_wait is 0. If you don't want the warning, replace 'wait' with '_wait' in the above strings. 
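        For example (a hypothetical driver choice, not enforced here), an instrument
        needing a fixed settling delay followed by an *OPC service request could use:

            self._async_mode = 'wait+srq'

        together with a non-zero async_wait, so that both conditions apply.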
""" if self._async_mode not in ['wait', '_wait', 'wait+srq', '_wait+srq', 'srq']: raise RuntimeError('Invalid async_mode selected') if self._async_mode in ['wait', '_wait']: return super(visaInstrumentAsync, self)._async_detect(max_time) ret = False if self._async_mode in ['wait+srq', '_wait+srq']: if not super(visaInstrumentAsync, self)._async_detect(max_time): return False if self._async_polling: if _retry_wait(self._async_detect_poll_func, max_time, delay=0.05): ret = True elif self._RQS_status == -1: # On National Instrument (NI) visa # the timeout actually used seems to be 16*ceil(max_time*1000/16) in ms. wait_resp = self.visa.wait_on_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, int(max_time*1000), capture_timeout=True) # context in wait_resp will be closed automatically #if wait_resp.context is not None: if not wait_resp.timed_out: # only reset event flag. We know the bit that is set already (OPC) self._async_last_esr = self._get_esr() # only reset SRQ flag. We know the bit that is set already self._async_last_status = self.read_status_byte() self._async_last_status_time = time.time() ret = True else: if self._RQS_done.wait(max_time): #we assume status only had bit 0x20(event) and 0x40(RQS) set #and event only has OPC set # status has already been reset. Now reset event flag. self._async_last_esr = self._get_esr() self._RQS_done.clear() # so that we can detect the next SRQ if needed without _doing async_trig (_async_trig_cleanup) ret = True return ret def _async_cleanup_after(self): super(visaInstrumentAsync, self)._async_cleanup_after() if self._async_do_cleanup: self.visa.disable_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, visa_wrap.constants.VI_ALL_MECH) self._async_do_cleanup = False def _async_trigger_helper(self): self.write('INITiate;*OPC') # this assume trig_src is immediate for agilent multi def _async_trig_cleanup(self): if not self._async_polling: self._async_do_cleanup = True if self._RQS_status != -1: self.visa.enable_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, visa_wrap.constants.VI_HNDLR) else: self.visa.enable_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, visa_wrap.constants.VI_QUEUE) # We detect the end of acquisition using *OPC and status byte. if self._get_esr() & 0x01: print 'Unread event byte!' # A while loop is needed when National Instrument (NI) gpib autopoll is active # This is the default when using the NI Visa. n = 0 while self.read_status_byte() & 0x40: # This is SRQ bit if self.visa.is_usb() and not self.visa.resource_manager.is_agilent(): # National instruments visa buffers a usb status byte (the SRQ bit only) # Therefore a request will be seen in multiple threads/process. # so it is normal to have left overs pass else: n += 1 if n > 0: print 'Unread(%i) status byte!'%n if self._async_polling: pass elif self._RQS_status != -1: self._RQS_status = 0 self._RQS_done.clear() else: # could use self.visa.discard_events(visa_wrap.constants.VI_EVENT_SERVICE_REQ, # visa_wrap.constans.VI_QUEUE) # but then we would not know how many events were discarded. 
n = 0 try: while True: self.visa.wait_on_event(visa_wrap.constants.VI_EVENT_SERVICE_REQ, 0) n += 1 except visa_wrap.VisaIOError as exc: if exc.error_code == visa_wrap.constants.VI_ERROR_TMO: pass else: raise if n>0: print 'Unread(%i) event queue!'%n self._async_last_status = 0 self._async_last_esr = 0 @locked_calling def _async_trig(self): super(visaInstrumentAsync, self)._async_trig() if 'srq' in self._async_mode: self._async_trig_cleanup() self._async_trigger_helper() def _normalize_usb(usb_resrc): usb_resrc = usb_resrc.upper() # make sure it is all upercase split = usb_resrc.split('::') if split[-1] == 'INSTR': del split[-1] if len(split) != 5: split.append('0') usbn, manuf, model, serial, interfaceN = split manuf = int(manuf, base=0) model = int(model, base=0) interfaceN = int(interfaceN, base=0) return 'USB0::0x%04X::0x%04X::%s::%i'%(manuf, model, serial, interfaceN), manuf, model def _normalize_gpib(gpib_resrc): if isinstance(gpib_resrc, basestring): gpib_resrc = gpib_resrc.upper() split = gpib_resrc.split('::') bus = 0 # split[0] is 'GPIB' if len(split[0]) > 4: bus = int(split[0][4:]) if split[-1] == 'INSTR': del split[-1] prim = int(split[1]) ret = 'GPIB%i::%i'%(bus, prim) if len(split) > 2: sec = int(split[2]) ret += '::%i'%sec return ret+'::INSTR' elif isinstance(gpib_resrc, int): return 'GPIB0::%i::INSTR'%gpib_resrc else: raise TypeError('the address is not in an acceptable type.') def _get_visa_idns(visa_addr, *args, **kwargs): vi = visaInstrument(visa_addr, *args, skip_id_test=True, quiet_delete=True, **kwargs) idns = vi.idn_split() del vi return idns class visaAutoLoader(visaInstrument): """ You can use this class to automatically select the proper class to load according to the idn returned by the instrument and the info in the registry. It returns another class (it is a factory class). Provide it at least a visa address. For usb devices it will try the usb registry first. Otherwise, like for all other device it will open it with visaInstrument first to read the idn then properly load it with the correct class. if skip_usb is set to True, then the usb search is skipped """ def __new__(cls, visa_addr, skip_usb=False, *args, **kwargs): if not skip_usb and isinstance(visa_addr, basestring) and visa_addr.upper().startswith('USB'): usb, manuf, model = _normalize_usb(visa_addr) try: cls = instruments_registry.find_usb(manuf, model) except KeyError: pass else: print 'Autoloading(USB) using instruments class "%s"'%cls.__name__ return cls(visa_addr, *args, **kwargs) idns = _get_visa_idns(visa_addr, *args, **kwargs) try: cls = instruments_registry.find_instr(idns['vendor'], idns['model'], idns['firmware']) except KeyError: idn = '{vendor},{model},{firmware}'.format(**idns) raise RuntimeError('Could not find an instrument for: %s (%s)'%(visa_addr, idn)) else: print 'Autoloading using instruments class "%s"'%cls.__name__ return cls(visa_addr, *args, **kwargs)
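# Illustrative usage sketch for the auto loader (the addresses below are hypothetical;
# the class actually returned depends on what is registered in instruments_registry):
#     instr = visaAutoLoader('USB0::0x0957::0x0118::MY49012345')  # usb registry lookup first
#     instr = visaAutoLoader('GPIB0::6::INSTR')                   # falls back to an *idn? lookup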
lgpl-3.0
-8,544,140,090,590,581,000
42.050713
166
0.580011
false