repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
h0nIg/ansible-modules-extras | cloud/google/gce_img.py | 23 | 5760 | #!/usr/bin/python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""An Ansible module to utilize GCE image resources."""
DOCUMENTATION = '''
---
module: gce_img
version_added: "1.9"
short_description: utilize GCE image resources
description:
- This module can create and delete GCE private images from a gzipped
compressed tarball containing raw disk data, or from existing detached
disks in any zone. U(https://cloud.google.com/compute/docs/images)
options:
name:
description:
- the name of the image to create or delete
required: true
default: null
description:
description:
- an optional description
required: false
default: null
source:
description:
- the source disk or the Google Cloud Storage URI to create the image from
required: false
default: null
state:
description:
- desired state of the image
required: false
default: "present"
choices: ["present", "absent"]
zone:
description:
- the zone of the disk specified by source
required: false
default: "us-central1-a"
timeout:
description:
- timeout for the operation
required: false
default: 180
version_added: "2.0"
service_account_email:
description:
- service account email
required: false
default: null
pem_file:
description:
- path to the pem file associated with the service account email
required: false
default: null
project_id:
description:
- your GCE project ID
required: false
default: null
requirements:
- "python >= 2.6"
- "apache-libcloud"
author: "Peter Tan (@tanpeter)"
'''
EXAMPLES = '''
# Create an image named test-image from the disk 'test-disk' in zone us-central1-a.
- gce_img:
name: test-image
source: test-disk
zone: us-central1-a
state: present
# Create an image named test-image from a tarball in Google Cloud Storage.
- gce_img:
name: test-image
source: https://storage.googleapis.com/bucket/path/to/image.tgz
# Alternatively use the gs scheme
- gce_img:
name: test-image
source: gs://bucket/path/to/image.tgz
# Delete an image named test-image.
- gce_img:
name: test-image
state: absent
'''
import sys
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError
from libcloud.common.google import ResourceExistsError
from libcloud.common.google import ResourceNotFoundError
_ = Provider.GCE
has_libcloud = True
except ImportError:
has_libcloud = False
GCS_URI = 'https://storage.googleapis.com/'
def create_image(gce, name, module):
"""Create an image with the specified name."""
source = module.params.get('source')
zone = module.params.get('zone')
desc = module.params.get('description')
timeout = module.params.get('timeout')
if not source:
module.fail_json(msg='Must supply a source', changed=False)
if source.startswith(GCS_URI):
# source is a Google Cloud Storage URI
volume = source
elif source.startswith('gs://'):
# libcloud only accepts https URI.
volume = source.replace('gs://', GCS_URI)
else:
try:
volume = gce.ex_get_volume(source, zone)
except ResourceNotFoundError:
module.fail_json(msg='Disk %s not found in zone %s' % (source, zone),
changed=False)
except GoogleBaseError, e:
module.fail_json(msg=str(e), changed=False)
old_timeout = gce.connection.timeout
try:
gce.connection.timeout = timeout
gce.ex_create_image(name, volume, desc, False)
return True
except ResourceExistsError:
return False
except GoogleBaseError, e:
module.fail_json(msg=str(e), changed=False)
finally:
gce.connection.timeout = old_timeout
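# Illustrative aside (not part of the module API): the source handling in
# create_image() accepts three forms. A hypothetical standalone version of
# that normalization, shown only as a sketch:
#
#   def normalize_source(source):
#       if source.startswith(GCS_URI):
#           return source                            # already an https URI
#       if source.startswith('gs://'):
#           return source.replace('gs://', GCS_URI)  # libcloud wants https
#       return None                                  # treat as a disk name
#
#   normalize_source('gs://bucket/img.tgz')
#   # -> 'https://storage.googleapis.com/bucket/img.tgz'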
def delete_image(gce, name, module):
"""Delete a specific image resource by name."""
try:
gce.ex_delete_image(name)
return True
except ResourceNotFoundError:
return False
except GoogleBaseError, e:
module.fail_json(msg=str(e), changed=False)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
description=dict(),
source=dict(),
state=dict(default='present', choices=['present', 'absent']),
zone=dict(default='us-central1-a'),
service_account_email=dict(),
pem_file=dict(),
project_id=dict(),
timeout=dict(type='int', default=180)
)
)
if not has_libcloud:
module.fail_json(msg='libcloud with GCE support is required.')
gce = gce_connect(module)
name = module.params.get('name')
state = module.params.get('state')
changed = False
# user wants to create an image.
if state == 'present':
changed = create_image(gce, name, module)
# user wants to delete the image.
if state == 'absent':
changed = delete_image(gce, name, module)
module.exit_json(changed=changed, name=name)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
main()
| gpl-3.0 | 7,208,064,137,263,877,000 | 26.169811 | 83 | 0.683333 | false |
dd00/commandergenius | project/jni/python/src/Lib/test/test_richcmp.py | 55 | 11262 | # Tests for rich comparisons
import unittest
from test import test_support
import operator
class Number:
def __init__(self, x):
self.x = x
def __lt__(self, other):
return self.x < other
def __le__(self, other):
return self.x <= other
def __eq__(self, other):
return self.x == other
def __ne__(self, other):
return self.x != other
def __gt__(self, other):
return self.x > other
def __ge__(self, other):
return self.x >= other
def __cmp__(self, other):
raise test_support.TestFailed, "Number.__cmp__() should not be called"
def __repr__(self):
return "Number(%r)" % (self.x, )
class Vector:
def __init__(self, data):
self.data = data
def __len__(self):
return len(self.data)
def __getitem__(self, i):
return self.data[i]
def __setitem__(self, i, v):
self.data[i] = v
__hash__ = None # Vectors cannot be hashed
def __nonzero__(self):
raise TypeError, "Vectors cannot be used in Boolean contexts"
def __cmp__(self, other):
raise test_support.TestFailed, "Vector.__cmp__() should not be called"
def __repr__(self):
return "Vector(%r)" % (self.data, )
def __lt__(self, other):
return Vector([a < b for a, b in zip(self.data, self.__cast(other))])
def __le__(self, other):
return Vector([a <= b for a, b in zip(self.data, self.__cast(other))])
def __eq__(self, other):
return Vector([a == b for a, b in zip(self.data, self.__cast(other))])
def __ne__(self, other):
return Vector([a != b for a, b in zip(self.data, self.__cast(other))])
def __gt__(self, other):
return Vector([a > b for a, b in zip(self.data, self.__cast(other))])
def __ge__(self, other):
return Vector([a >= b for a, b in zip(self.data, self.__cast(other))])
def __cast(self, other):
if isinstance(other, Vector):
other = other.data
if len(self.data) != len(other):
raise ValueError, "Cannot compare vectors of different length"
return other
opmap = {
"lt": (lambda a,b: a< b, operator.lt, operator.__lt__),
"le": (lambda a,b: a<=b, operator.le, operator.__le__),
"eq": (lambda a,b: a==b, operator.eq, operator.__eq__),
"ne": (lambda a,b: a!=b, operator.ne, operator.__ne__),
"gt": (lambda a,b: a> b, operator.gt, operator.__gt__),
"ge": (lambda a,b: a>=b, operator.ge, operator.__ge__)
}
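# Each opmap entry bundles three equivalent callables: a lambda over the
# infix operator, the operator-module function, and its dunder alias. A
# quick sanity sketch, assuming plain ints:
#
#   for op in opmap["lt"]:
#       assert op(1, 2) is True
#       assert op(2, 1) is False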
class VectorTest(unittest.TestCase):
def checkfail(self, error, opname, *args):
for op in opmap[opname]:
self.assertRaises(error, op, *args)
def checkequal(self, opname, a, b, expres):
for op in opmap[opname]:
realres = op(a, b)
# can't use assertEqual(realres, expres) here
self.assertEqual(len(realres), len(expres))
for i in xrange(len(realres)):
# results are bool, so we can use "is" here
self.assert_(realres[i] is expres[i])
def test_mixed(self):
# check that comparisons involving Vector objects
# which return rich results (i.e. Vectors with itemwise
# comparison results) work
a = Vector(range(2))
b = Vector(range(3))
# all comparisons should fail for vectors of different lengths
for opname in opmap:
self.checkfail(ValueError, opname, a, b)
a = range(5)
b = 5 * [2]
# try mixed arguments (but not (a, b) as that won't return a bool vector)
args = [(a, Vector(b)), (Vector(a), b), (Vector(a), Vector(b))]
for (a, b) in args:
self.checkequal("lt", a, b, [True, True, False, False, False])
self.checkequal("le", a, b, [True, True, True, False, False])
self.checkequal("eq", a, b, [False, False, True, False, False])
self.checkequal("ne", a, b, [True, True, False, True, True ])
self.checkequal("gt", a, b, [False, False, False, True, True ])
self.checkequal("ge", a, b, [False, False, True, True, True ])
for ops in opmap.itervalues():
for op in ops:
# calls __nonzero__, which should fail
self.assertRaises(TypeError, bool, op(a, b))
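# Worked example of the itemwise semantics exercised above (a sketch, not
# part of the suite): comparing Vectors yields a Vector of per-element
# bools rather than a single truth value,
#
#   Vector([1, 2, 3]) < Vector([2, 2, 2])
#   # -> Vector([True, False, False])
#
# which is exactly why calling bool() on the result must raise via
# __nonzero__.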
class NumberTest(unittest.TestCase):
def test_basic(self):
# Check that comparisons involving Number objects
# give the same results as comparing the
# corresponding ints
for a in xrange(3):
for b in xrange(3):
for typea in (int, Number):
for typeb in (int, Number):
if typea==typeb==int:
continue # the combination int, int is useless
ta = typea(a)
tb = typeb(b)
for ops in opmap.itervalues():
for op in ops:
realoutcome = op(a, b)
testoutcome = op(ta, tb)
self.assertEqual(realoutcome, testoutcome)
def checkvalue(self, opname, a, b, expres):
for typea in (int, Number):
for typeb in (int, Number):
ta = typea(a)
tb = typeb(b)
for op in opmap[opname]:
realres = op(ta, tb)
realres = getattr(realres, "x", realres)
self.assert_(realres is expres)
def test_values(self):
# check all operators and all comparison results
self.checkvalue("lt", 0, 0, False)
self.checkvalue("le", 0, 0, True )
self.checkvalue("eq", 0, 0, True )
self.checkvalue("ne", 0, 0, False)
self.checkvalue("gt", 0, 0, False)
self.checkvalue("ge", 0, 0, True )
self.checkvalue("lt", 0, 1, True )
self.checkvalue("le", 0, 1, True )
self.checkvalue("eq", 0, 1, False)
self.checkvalue("ne", 0, 1, True )
self.checkvalue("gt", 0, 1, False)
self.checkvalue("ge", 0, 1, False)
self.checkvalue("lt", 1, 0, False)
self.checkvalue("le", 1, 0, False)
self.checkvalue("eq", 1, 0, False)
self.checkvalue("ne", 1, 0, True )
self.checkvalue("gt", 1, 0, True )
self.checkvalue("ge", 1, 0, True )
class MiscTest(unittest.TestCase):
def test_misbehavin(self):
class Misb:
def __lt__(self, other): return 0
def __gt__(self, other): return 0
def __eq__(self, other): return 0
def __le__(self, other): raise TestFailed, "This shouldn't happen"
def __ge__(self, other): raise TestFailed, "This shouldn't happen"
def __ne__(self, other): raise TestFailed, "This shouldn't happen"
def __cmp__(self, other): raise RuntimeError, "expected"
a = Misb()
b = Misb()
self.assertEqual(a<b, 0)
self.assertEqual(a==b, 0)
self.assertEqual(a>b, 0)
self.assertRaises(RuntimeError, cmp, a, b)
def test_not(self):
# Check that exceptions in __nonzero__ are properly
# propagated by the not operator
import operator
class Exc(Exception):
pass
class Bad:
def __nonzero__(self):
raise Exc
def do(bad):
not bad
for func in (do, operator.not_):
self.assertRaises(Exc, func, Bad())
def test_recursion(self):
# Check that comparison for recursive objects fails gracefully
from UserList import UserList
a = UserList()
b = UserList()
a.append(b)
b.append(a)
self.assertRaises(RuntimeError, operator.eq, a, b)
self.assertRaises(RuntimeError, operator.ne, a, b)
self.assertRaises(RuntimeError, operator.lt, a, b)
self.assertRaises(RuntimeError, operator.le, a, b)
self.assertRaises(RuntimeError, operator.gt, a, b)
self.assertRaises(RuntimeError, operator.ge, a, b)
b.append(17)
# Even recursive lists of different lengths are different,
# but they cannot be ordered
self.assert_(not (a == b))
self.assert_(a != b)
self.assertRaises(RuntimeError, operator.lt, a, b)
self.assertRaises(RuntimeError, operator.le, a, b)
self.assertRaises(RuntimeError, operator.gt, a, b)
self.assertRaises(RuntimeError, operator.ge, a, b)
a.append(17)
self.assertRaises(RuntimeError, operator.eq, a, b)
self.assertRaises(RuntimeError, operator.ne, a, b)
a.insert(0, 11)
b.insert(0, 12)
self.assert_(not (a == b))
self.assert_(a != b)
self.assert_(a < b)
class DictTest(unittest.TestCase):
def test_dicts(self):
# Verify that __eq__ and __ne__ work for dicts even if the keys and
# values don't support anything other than __eq__ and __ne__ (and
# __hash__). Complex numbers are a fine example of that.
import random
imag1a = {}
for i in range(50):
imag1a[random.randrange(100)*1j] = random.randrange(100)*1j
items = imag1a.items()
random.shuffle(items)
imag1b = {}
for k, v in items:
imag1b[k] = v
imag2 = imag1b.copy()
imag2[k] = v + 1.0
self.assert_(imag1a == imag1a)
self.assert_(imag1a == imag1b)
self.assert_(imag2 == imag2)
self.assert_(imag1a != imag2)
for opname in ("lt", "le", "gt", "ge"):
for op in opmap[opname]:
self.assertRaises(TypeError, op, imag1a, imag2)
class ListTest(unittest.TestCase):
def assertIs(self, a, b):
self.assert_(a is b)
def test_coverage(self):
# exercise all comparisons for lists
x = [42]
self.assertIs(x<x, False)
self.assertIs(x<=x, True)
self.assertIs(x==x, True)
self.assertIs(x!=x, False)
self.assertIs(x>x, False)
self.assertIs(x>=x, True)
y = [42, 42]
self.assertIs(x<y, True)
self.assertIs(x<=y, True)
self.assertIs(x==y, False)
self.assertIs(x!=y, True)
self.assertIs(x>y, False)
self.assertIs(x>=y, False)
def test_badentry(self):
# make sure that exceptions for item comparison are properly
# propagated in list comparisons
class Exc(Exception):
pass
class Bad:
def __eq__(self, other):
raise Exc
x = [Bad()]
y = [Bad()]
for op in opmap["eq"]:
self.assertRaises(Exc, op, x, y)
def test_goodentry(self):
# This test exercises the final call to PyObject_RichCompare()
# in Objects/listobject.c::list_richcompare()
class Good:
def __lt__(self, other):
return True
x = [Good()]
y = [Good()]
for op in opmap["lt"]:
self.assertIs(op(x, y), True)
def test_main():
test_support.run_unittest(VectorTest, NumberTest, MiscTest, DictTest, ListTest)
if __name__ == "__main__":
test_main()
| lgpl-2.1 | -6,808,104,336,942,220,000 | 32.517857 | 83 | 0.539869 | false |
madj4ck/ansible | lib/ansible/new_inventory/host.py | 236 | 1551 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class Host:
def __init__(self, name):
self._name = name
self._connection = None
self._ipv4_address = ''
self._ipv6_address = ''
self._port = 22
self._vars = dict()
def __repr__(self):
return self.get_name()
def get_name(self):
return self._name
def get_groups(self):
return []
def set_variable(self, name, value):
''' sets a variable for this host '''
self._vars[name] = value
def get_vars(self):
''' returns all variables for this host '''
all_vars = self._vars.copy()
all_vars.update(dict(inventory_hostname=self._name))
return all_vars
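# Minimal usage sketch (hypothetical host name and variable):
#
#   h = Host('web01')
#   h.set_variable('ansible_user', 'deploy')
#   h.get_vars()
#   # -> {'ansible_user': 'deploy', 'inventory_hostname': 'web01'}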
| gpl-3.0 | 2,247,443,092,952,365,800 | 29.411765 | 70 | 0.643456 | false |
google-research/lasertagger | run_lasertagger_test.py | 3 | 1196 | # coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import run_lasertagger
import tensorflow as tf
class RunLasertaggerTest(tf.test.TestCase):
def test_step_calculation(self):
num_examples = 10
batch_size = 2
num_epochs = 3
warmup_proportion = 0.5
steps, warmup_steps = run_lasertagger._calculate_steps(
num_examples, batch_size, num_epochs, warmup_proportion)
self.assertEqual(steps, 15)
self.assertEqual(warmup_steps, 7)
if __name__ == '__main__':
tf.test.main()
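# The expected values above follow from the step arithmetic (assuming
# integer truncation inside _calculate_steps): steps = (10 / 2) * 3 = 15
# and warmup_steps = int(0.5 * 15) = 7.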
| apache-2.0 | 9,144,926,343,302,260,000 | 28.9 | 74 | 0.728261 | false |
msdx321/android_kernel_samsung_heroXqltechn | lazy-prebuilt/aarch64-linux-android-4.9/share/gdb/python/gdb/command/explore.py | 126 | 26824 | # GDB 'explore' command.
# Copyright (C) 2012-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the GDB 'explore' command using the GDB Python API."""
import gdb
import sys
if sys.version_info[0] > 2:
# Python 3 renamed raw_input to input
raw_input = input
class Explorer(object):
"""Internal class which invokes other explorers."""
# This map is filled by the Explorer.init_env() function
type_code_to_explorer_map = { }
_SCALAR_TYPE_LIST = (
gdb.TYPE_CODE_CHAR,
gdb.TYPE_CODE_INT,
gdb.TYPE_CODE_BOOL,
gdb.TYPE_CODE_FLT,
gdb.TYPE_CODE_VOID,
gdb.TYPE_CODE_ENUM,
)
@staticmethod
def guard_expr(expr):
length = len(expr)
guard = False
if expr[0] == '(' and expr[length-1] == ')':
pass
else:
i = 0
while i < length:
c = expr[i]
if (c == '_' or ('a' <= c and c <= 'z') or
('A' <= c and c <= 'Z') or ('0' <= c and c <= '9')):
pass
else:
guard = True
break
i += 1
if guard:
return "(" + expr + ")"
else:
return expr
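# Behavior sketch for guard_expr (assumed inputs): bare identifiers pass
# through unchanged, while anything containing operator characters is
# wrapped so that later '.field' and '[index]' accesses bind to the whole
# expression:
#
#   Explorer.guard_expr("foo")    # -> "foo"
#   Explorer.guard_expr("*ptr")   # -> "(*ptr)"
#   Explorer.guard_expr("(a+b)")  # -> "(a+b)"  (already parenthesized)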
@staticmethod
def explore_expr(expr, value, is_child):
"""Main function to explore an expression value.
Arguments:
expr: The expression string that is being explored.
value: The gdb.Value value of the expression.
is_child: Boolean value to indicate if the expression is a child.
An expression is a child if it is derived from the main
expression entered by the user. For example, if the user
entered an expression which evaluates to a struct, then
when exploring the fields of the struct, is_child is set
to True internally.
Returns:
No return value.
"""
type_code = value.type.code
if type_code in Explorer.type_code_to_explorer_map:
explorer_class = Explorer.type_code_to_explorer_map[type_code]
while explorer_class.explore_expr(expr, value, is_child):
pass
else:
print ("Explorer for type '%s' not yet available.\n" %
str(value.type))
@staticmethod
def explore_type(name, datatype, is_child):
"""Main function to explore a data type.
Arguments:
name: The string representing the path to the data type being
explored.
datatype: The gdb.Type value of the data type being explored.
is_child: Boolean value to indicate if the name is a child.
A name is a child if it is derived from the main name
entered by the user. For example, if the user entered
the name of struct type, then when exploring the fields
of the struct, is_child is set to True internally.
Returns:
No return value.
"""
type_code = datatype.code
if type_code in Explorer.type_code_to_explorer_map:
explorer_class = Explorer.type_code_to_explorer_map[type_code]
while explorer_class.explore_type(name, datatype, is_child):
pass
else:
print ("Explorer for type '%s' not yet available.\n" %
str(datatype))
@staticmethod
def init_env():
"""Initializes the Explorer environment.
This function should be invoked before starting any exploration. If
invoked before an exploration, it need not be invoked for subsequent
explorations.
"""
Explorer.type_code_to_explorer_map = {
gdb.TYPE_CODE_CHAR : ScalarExplorer,
gdb.TYPE_CODE_INT : ScalarExplorer,
gdb.TYPE_CODE_BOOL : ScalarExplorer,
gdb.TYPE_CODE_FLT : ScalarExplorer,
gdb.TYPE_CODE_VOID : ScalarExplorer,
gdb.TYPE_CODE_ENUM : ScalarExplorer,
gdb.TYPE_CODE_STRUCT : CompoundExplorer,
gdb.TYPE_CODE_UNION : CompoundExplorer,
gdb.TYPE_CODE_PTR : PointerExplorer,
gdb.TYPE_CODE_REF : ReferenceExplorer,
gdb.TYPE_CODE_TYPEDEF : TypedefExplorer,
gdb.TYPE_CODE_ARRAY : ArrayExplorer
}
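# Dispatch sketch: exploring a value of, say, TYPE_CODE_INT looks up
# ScalarExplorer in the map above and re-invokes its explore_expr() in a
# loop until it returns False, i.e. until that level asks not to be
# explored again.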
@staticmethod
def is_scalar_type(type):
"""Checks whether a type is a scalar type.
A type is a scalar type of its type is
gdb.TYPE_CODE_CHAR or
gdb.TYPE_CODE_INT or
gdb.TYPE_CODE_BOOL or
gdb.TYPE_CODE_FLT or
gdb.TYPE_CODE_VOID or
gdb.TYPE_CODE_ENUM.
Arguments:
type: The type to be checked.
Returns:
'True' if 'type' is a scalar type. 'False' otherwise.
"""
return type.code in Explorer._SCALAR_TYPE_LIST
@staticmethod
def return_to_parent_value():
"""A utility function which prints that the current exploration session
is returning to the parent value. Useful when exploring values.
"""
print ("\nReturning to parent value...\n")
@staticmethod
def return_to_parent_value_prompt():
"""A utility function which prompts the user to press the 'enter' key
so that the exploration session can shift back to the parent value.
Useful when exploring values.
"""
raw_input("\nPress enter to return to parent value: ")
@staticmethod
def return_to_enclosing_type():
"""A utility function which prints that the current exploration session
is returning to the enclosing type. Useful when exploring types.
"""
print ("\nReturning to enclosing type...\n")
@staticmethod
def return_to_enclosing_type_prompt():
"""A utility function which prompts the user to press the 'enter' key
so that the exploration session can shift back to the enclosing type.
Useful when exploring types.
"""
raw_input("\nPress enter to return to enclosing type: ")
class ScalarExplorer(object):
"""Internal class used to explore scalar values."""
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore scalar values.
See Explorer.explore_expr and Explorer.is_scalar_type for more
information.
"""
print ("'%s' is a scalar value of type '%s'." %
(expr, value.type))
print ("%s = %s" % (expr, str(value)))
if is_child:
Explorer.return_to_parent_value_prompt()
Explorer.return_to_parent_value()
return False
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore scalar types.
See Explorer.explore_type and Explorer.is_scalar_type for more
information.
"""
if datatype.code == gdb.TYPE_CODE_ENUM:
if is_child:
print ("%s is of an enumerated type '%s'." %
(name, str(datatype)))
else:
print ("'%s' is an enumerated type." % name)
else:
if is_child:
print ("%s is of a scalar type '%s'." %
(name, str(datatype)))
else:
print ("'%s' is a scalar type." % name)
if is_child:
Explorer.return_to_enclosing_type_prompt()
Explorer.return_to_enclosing_type()
return False
class PointerExplorer(object):
"""Internal class used to explore pointer values."""
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore pointer values.
See Explorer.explore_expr for more information.
"""
print ("'%s' is a pointer to a value of type '%s'" %
(expr, str(value.type.target())))
option = raw_input("Continue exploring it as a pointer to a single "
"value [y/n]: ")
if option == "y":
deref_value = None
try:
deref_value = value.dereference()
str(deref_value)
except gdb.MemoryError:
print ("'%s' a pointer pointing to an invalid memory "
"location." % expr)
if is_child:
Explorer.return_to_parent_value_prompt()
return False
Explorer.explore_expr("*%s" % Explorer.guard_expr(expr),
deref_value, is_child)
return False
option = raw_input("Continue exploring it as a pointer to an "
"array [y/n]: ")
if option == "y":
while True:
index = 0
try:
index = int(raw_input("Enter the index of the element you "
"want to explore in '%s': " % expr))
except ValueError:
break
element_expr = "%s[%d]" % (Explorer.guard_expr(expr), index)
element = value[index]
try:
str(element)
except gdb.MemoryError:
print ("Cannot read value at index %d." % index)
continue
Explorer.explore_expr(element_expr, element, True)
return False
if is_child:
Explorer.return_to_parent_value()
return False
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore pointer types.
See Explorer.explore_type for more information.
"""
target_type = datatype.target()
print ("\n%s is a pointer to a value of type '%s'." %
(name, str(target_type)))
Explorer.explore_type("the pointee type of %s" % name,
target_type,
is_child)
return False
class ReferenceExplorer(object):
"""Internal class used to explore reference (TYPE_CODE_REF) values."""
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore array values.
See Explorer.explore_expr for more information.
"""
referenced_value = value.referenced_value()
Explorer.explore_expr(expr, referenced_value, is_child)
return False
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore pointer types.
See Explorer.explore_type for more information.
"""
target_type = datatype.target()
Explorer.explore_type(name, target_type, is_child)
return False
class ArrayExplorer(object):
"""Internal class used to explore arrays."""
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore array values.
See Explorer.explore_expr for more information.
"""
target_type = value.type.target()
print ("'%s' is an array of '%s'." % (expr, str(target_type)))
index = 0
try:
index = int(raw_input("Enter the index of the element you want to "
"explore in '%s': " % expr))
except ValueError:
if is_child:
Explorer.return_to_parent_value()
return False
element = None
try:
element = value[index]
str(element)
except gdb.MemoryError:
print ("Cannot read value at index %d." % index)
raw_input("Press enter to continue... ")
return True
Explorer.explore_expr("%s[%d]" % (Explorer.guard_expr(expr), index),
element, True)
return True
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore array types.
See Explorer.explore_type for more information.
"""
target_type = datatype.target()
print ("%s is an array of '%s'." % (name, str(target_type)))
Explorer.explore_type("the array element of %s" % name, target_type,
is_child)
return False
class CompoundExplorer(object):
"""Internal class used to explore struct, classes and unions."""
@staticmethod
def _print_fields(print_list):
"""Internal function which prints the fields of a struct/class/union.
"""
max_field_name_length = 0
for pair in print_list:
if max_field_name_length < len(pair[0]):
max_field_name_length = len(pair[0])
for pair in print_list:
print (" %*s = %s" % (max_field_name_length, pair[0], pair[1]))
@staticmethod
def _get_real_field_count(fields):
real_field_count = 0
for field in fields:
if not field.artificial:
real_field_count = real_field_count + 1
return real_field_count
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore structs/classes and union values.
See Explorer.explore_expr for more information.
"""
datatype = value.type
type_code = datatype.code
fields = datatype.fields()
if type_code == gdb.TYPE_CODE_STRUCT:
type_desc = "struct/class"
else:
type_desc = "union"
if CompoundExplorer._get_real_field_count(fields) == 0:
print ("The value of '%s' is a %s of type '%s' with no fields." %
(expr, type_desc, str(value.type)))
if is_child:
Explorer.return_to_parent_value_prompt()
return False
print ("The value of '%s' is a %s of type '%s' with the following "
"fields:\n" % (expr, type_desc, str(value.type)))
has_explorable_fields = False
choice_to_compound_field_map = { }
current_choice = 0
print_list = [ ]
for field in fields:
if field.artificial:
continue
field_full_name = Explorer.guard_expr(expr) + "." + field.name
if field.is_base_class:
field_value = value.cast(field.type)
else:
field_value = value[field.name]
literal_value = ""
if type_code == gdb.TYPE_CODE_UNION:
literal_value = ("<Enter %d to explore this field of type "
"'%s'>" % (current_choice, str(field.type)))
has_explorable_fields = True
else:
if Explorer.is_scalar_type(field.type):
literal_value = ("%s .. (Value of type '%s')" %
(str(field_value), str(field.type)))
else:
if field.is_base_class:
field_desc = "base class"
else:
field_desc = "field"
literal_value = ("<Enter %d to explore this %s of type "
"'%s'>" %
(current_choice, field_desc,
str(field.type)))
has_explorable_fields = True
choice_to_compound_field_map[str(current_choice)] = (
field_full_name, field_value)
current_choice = current_choice + 1
print_list.append((field.name, literal_value))
CompoundExplorer._print_fields(print_list)
print ("")
if has_explorable_fields:
choice = raw_input("Enter the field number of choice: ")
if choice in choice_to_compound_field_map:
Explorer.explore_expr(choice_to_compound_field_map[choice][0],
choice_to_compound_field_map[choice][1],
True)
return True
else:
if is_child:
Explorer.return_to_parent_value()
else:
if is_child:
Explorer.return_to_parent_value_prompt()
return False
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore struct/class and union types.
See Explorer.explore_type for more information.
"""
type_code = datatype.code
type_desc = ""
if type_code == gdb.TYPE_CODE_STRUCT:
type_desc = "struct/class"
else:
type_desc = "union"
fields = datatype.fields()
if CompoundExplorer._get_real_field_count(fields) == 0:
if is_child:
print ("%s is a %s of type '%s' with no fields." %
(name, type_desc, str(datatype)))
Explorer.return_to_enclosing_type_prompt()
else:
print ("'%s' is a %s with no fields." % (name, type_desc))
return False
if is_child:
print ("%s is a %s of type '%s' "
"with the following fields:\n" %
(name, type_desc, str(datatype)))
else:
print ("'%s' is a %s with the following "
"fields:\n" %
(name, type_desc))
has_explorable_fields = False
current_choice = 0
choice_to_compound_field_map = { }
print_list = [ ]
for field in fields:
if field.artificial:
continue
if field.is_base_class:
field_desc = "base class"
else:
field_desc = "field"
rhs = ("<Enter %d to explore this %s of type '%s'>" %
(current_choice, field_desc, str(field.type)))
print_list.append((field.name, rhs))
choice_to_compound_field_map[str(current_choice)] = (
field.name, field.type, field_desc)
current_choice = current_choice + 1
CompoundExplorer._print_fields(print_list)
print ("")
if len(choice_to_compound_field_map) > 0:
choice = raw_input("Enter the field number of choice: ")
if choice in choice_to_compound_field_map:
if is_child:
new_name = ("%s '%s' of %s" %
(choice_to_compound_field_map[choice][2],
choice_to_compound_field_map[choice][0],
name))
else:
new_name = ("%s '%s' of '%s'" %
(choice_to_compound_field_map[choice][2],
choice_to_compound_field_map[choice][0],
name))
Explorer.explore_type(new_name,
choice_to_compound_field_map[choice][1], True)
return True
else:
if is_child:
Explorer.return_to_enclosing_type()
else:
if is_child:
Explorer.return_to_enclosing_type_prompt()
return False
class TypedefExplorer(object):
"""Internal class used to explore values whose type is a typedef."""
@staticmethod
def explore_expr(expr, value, is_child):
"""Function to explore typedef values.
See Explorer.explore_expr for more information.
"""
actual_type = value.type.strip_typedefs()
print ("The value of '%s' is of type '%s' "
"which is a typedef of type '%s'" %
(expr, str(value.type), str(actual_type)))
Explorer.explore_expr(expr, value.cast(actual_type), is_child)
return False
@staticmethod
def explore_type(name, datatype, is_child):
"""Function to explore typedef types.
See Explorer.explore_type for more information.
"""
actual_type = datatype.strip_typedefs()
if is_child:
print ("The type of %s is a typedef of type '%s'." %
(name, str(actual_type)))
else:
print ("The type '%s' is a typedef of type '%s'." %
(name, str(actual_type)))
Explorer.explore_type(name, actual_type, is_child)
return False
class ExploreUtils(object):
"""Internal class which provides utilities for the main command classes."""
@staticmethod
def check_args(name, arg_str):
"""Utility to check if adequate number of arguments are passed to an
explore command.
Arguments:
name: The name of the explore command.
arg_str: The argument string passed to the explore command.
Returns:
True if adequate arguments are passed, false otherwise.
Raises:
gdb.GdbError if adequate arguments are not passed.
"""
if len(arg_str) < 1:
raise gdb.GdbError("ERROR: '%s' requires an argument."
% name)
return False
else:
return True
@staticmethod
def get_type_from_str(type_str):
"""A utility function to deduce the gdb.Type value from a string
representing the type.
Arguments:
type_str: The type string from which the gdb.Type value should be
deduced.
Returns:
The deduced gdb.Type value if possible, None otherwise.
"""
try:
# Assume the current language to be C/C++ and try that first.
return gdb.parse_and_eval("(%s *)0" % type_str).type.target()
except RuntimeError:
# If assumption of current language to be C/C++ was wrong, then
# lookup the type using the API.
try:
return gdb.lookup_type(type_str)
except RuntimeError:
return None
@staticmethod
def get_value_from_str(value_str):
"""A utility function to deduce the gdb.Value value from a string
representing the value.
Arguments:
value_str: The value string from which the gdb.Value value should
be deduced.
Returns:
The deduced gdb.Value value if possible, None otherwise.
"""
try:
return gdb.parse_and_eval(value_str)
except RuntimeError:
return None
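# Usage sketch for these helpers (hypothetical session state):
#
#   ExploreUtils.get_type_from_str("unsigned int")   # gdb.Type or None
#   ExploreUtils.get_value_from_str("my_var + 1")    # gdb.Value or None
#
# Both swallow RuntimeError and return None, letting the commands below
# decide whether an argument names a type, a value, or neither.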
class ExploreCommand(gdb.Command):
"""Explore a value or a type valid in the current context.
Usage:
explore ARG
- ARG is either a valid expression or a type name.
- At any stage of exploration, hit the return key (instead of a
choice, if any) to return to the enclosing type or value.
"""
def __init__(self):
super(ExploreCommand, self).__init__(name = "explore",
command_class = gdb.COMMAND_DATA,
prefix = True)
def invoke(self, arg_str, from_tty):
if ExploreUtils.check_args("explore", arg_str) == False:
return
# Check if it is a value
value = ExploreUtils.get_value_from_str(arg_str)
if value is not None:
Explorer.explore_expr(arg_str, value, False)
return
# If it is not a value, check if it is a type
datatype = ExploreUtils.get_type_from_str(arg_str)
if datatype is not None:
Explorer.explore_type(arg_str, datatype, False)
return
# If it is neither a value nor a type, raise an error.
raise gdb.GdbError(
("'%s' neither evaluates to a value nor is a type "
"in the current context." %
arg_str))
class ExploreValueCommand(gdb.Command):
"""Explore value of an expression valid in the current context.
Usage:
explore value ARG
- ARG is a valid expression.
- At any stage of exploration, hit the return key (instead of a
choice, if any) to return to the enclosing value.
"""
def __init__(self):
super(ExploreValueCommand, self).__init__(
name = "explore value", command_class = gdb.COMMAND_DATA)
def invoke(self, arg_str, from_tty):
if ExploreUtils.check_args("explore value", arg_str) == False:
return
value = ExploreUtils.get_value_from_str(arg_str)
if value is None:
raise gdb.GdbError(
(" '%s' does not evaluate to a value in the current "
"context." %
arg_str))
return
Explorer.explore_expr(arg_str, value, False)
class ExploreTypeCommand(gdb.Command):
"""Explore a type or the type of an expression valid in the current
context.
Usage:
explore type ARG
- ARG is a valid expression or a type name.
- At any stage of exploration, hit the return key (instead of a
choice, if any) to return to the enclosing type.
"""
def __init__(self):
super(ExploreTypeCommand, self).__init__(
name = "explore type", command_class = gdb.COMMAND_DATA)
def invoke(self, arg_str, from_tty):
if ExploreUtils.check_args("explore type", arg_str) == False:
return
datatype = ExploreUtils.get_type_from_str(arg_str)
if datatype is not None:
Explorer.explore_type(arg_str, datatype, False)
return
value = ExploreUtils.get_value_from_str(arg_str)
if value is not None:
print ("'%s' is of type '%s'." % (arg_str, str(value.type)))
Explorer.explore_type(str(value.type), value.type, False)
return
raise gdb.GdbError(("'%s' is not a type or value in the current "
"context." % arg_str))
Explorer.init_env()
ExploreCommand()
ExploreValueCommand()
ExploreTypeCommand()
| gpl-2.0 | 1,975,984,108,683,332,000 | 34.294737 | 79 | 0.540262 | false |
broferek/ansible | test/units/modules/network/fortios/test_fortios_system_sdn_connector.py | 21 | 19557 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_sdn_connector
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_sdn_connector.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_system_sdn_connector_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_sdn_connector': {
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
expected_data = {
'access-key': 'test_value_3',
'azure-region': 'global',
'client-id': 'test_value_5',
'client-secret': 'test_value_6',
'compartment-id': 'test_value_7',
'gcp-project': 'test_value_8',
'key-passwd': 'test_value_9',
'login-endpoint': 'test_value_10',
'name': 'default_name_11',
'oci-cert': 'test_value_12',
'oci-fingerprint': 'test_value_13',
'oci-region': 'phoenix',
'password': 'test_value_15',
'private-key': 'test_value_16',
'region': 'test_value_17',
'resource-group': 'test_value_18',
'resource-url': 'test_value_19',
'secret-key': 'test_value_20',
'server': '192.168.100.21',
'server-port': '22',
'service-account': 'test_value_23',
'status': 'disable',
'subscription-id': 'test_value_25',
'tenant-id': 'test_value_26',
'type': 'aci',
'update-interval': '28',
'use-metadata-iam': 'disable',
'user-id': 'test_value_30',
'username': 'test_value_31',
'vpc-id': 'test_value_32'
}
set_method_mock.assert_called_with('system', 'sdn-connector', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
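# Note on the pattern used throughout these tests: playbook-style keys use
# underscores ('access_key') while the FortiOS API payload uses hyphens
# ('access-key'). A minimal sketch of that renaming (an assumed
# illustration, not necessarily the module's actual helper):
#
#   def underscore_to_hyphen(data):
#       return dict((k.replace('_', '-'), v) for k, v in data.items())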
def test_system_sdn_connector_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_sdn_connector': {
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
expected_data = {
'access-key': 'test_value_3',
'azure-region': 'global',
'client-id': 'test_value_5',
'client-secret': 'test_value_6',
'compartment-id': 'test_value_7',
'gcp-project': 'test_value_8',
'key-passwd': 'test_value_9',
'login-endpoint': 'test_value_10',
'name': 'default_name_11',
'oci-cert': 'test_value_12',
'oci-fingerprint': 'test_value_13',
'oci-region': 'phoenix',
'password': 'test_value_15',
'private-key': 'test_value_16',
'region': 'test_value_17',
'resource-group': 'test_value_18',
'resource-url': 'test_value_19',
'secret-key': 'test_value_20',
'server': '192.168.100.21',
'server-port': '22',
'service-account': 'test_value_23',
'status': 'disable',
'subscription-id': 'test_value_25',
'tenant-id': 'test_value_26',
'type': 'aci',
'update-interval': '28',
'use-metadata-iam': 'disable',
'user-id': 'test_value_30',
'username': 'test_value_31',
'vpc-id': 'test_value_32'
}
set_method_mock.assert_called_with('system', 'sdn-connector', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_sdn_connector_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_sdn_connector': {
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'sdn-connector', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_sdn_connector_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_sdn_connector': {
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'sdn-connector', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_sdn_connector_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_sdn_connector': {
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
expected_data = {
'access-key': 'test_value_3',
'azure-region': 'global',
'client-id': 'test_value_5',
'client-secret': 'test_value_6',
'compartment-id': 'test_value_7',
'gcp-project': 'test_value_8',
'key-passwd': 'test_value_9',
'login-endpoint': 'test_value_10',
'name': 'default_name_11',
'oci-cert': 'test_value_12',
'oci-fingerprint': 'test_value_13',
'oci-region': 'phoenix',
'password': 'test_value_15',
'private-key': 'test_value_16',
'region': 'test_value_17',
'resource-group': 'test_value_18',
'resource-url': 'test_value_19',
'secret-key': 'test_value_20',
'server': '192.168.100.21',
'server-port': '22',
'service-account': 'test_value_23',
'status': 'disable',
'subscription-id': 'test_value_25',
'tenant-id': 'test_value_26',
'type': 'aci',
'update-interval': '28',
'use-metadata-iam': 'disable',
'user-id': 'test_value_30',
'username': 'test_value_31',
'vpc-id': 'test_value_32'
}
set_method_mock.assert_called_with('system', 'sdn-connector', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_system_sdn_connector_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_sdn_connector': {
'random_attribute_not_valid': 'tag',
'access_key': 'test_value_3',
'azure_region': 'global',
'client_id': 'test_value_5',
'client_secret': 'test_value_6',
'compartment_id': 'test_value_7',
'gcp_project': 'test_value_8',
'key_passwd': 'test_value_9',
'login_endpoint': 'test_value_10',
'name': 'default_name_11',
'oci_cert': 'test_value_12',
'oci_fingerprint': 'test_value_13',
'oci_region': 'phoenix',
'password': 'test_value_15',
'private_key': 'test_value_16',
'region': 'test_value_17',
'resource_group': 'test_value_18',
'resource_url': 'test_value_19',
'secret_key': 'test_value_20',
'server': '192.168.100.21',
'server_port': '22',
'service_account': 'test_value_23',
'status': 'disable',
'subscription_id': 'test_value_25',
'tenant_id': 'test_value_26',
'type': 'aci',
'update_interval': '28',
'use_metadata_iam': 'disable',
'user_id': 'test_value_30',
'username': 'test_value_31',
'vpc_id': 'test_value_32'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_sdn_connector.fortios_system(input_data, fos_instance)
expected_data = {
'access-key': 'test_value_3',
'azure-region': 'global',
'client-id': 'test_value_5',
'client-secret': 'test_value_6',
'compartment-id': 'test_value_7',
'gcp-project': 'test_value_8',
'key-passwd': 'test_value_9',
'login-endpoint': 'test_value_10',
'name': 'default_name_11',
'oci-cert': 'test_value_12',
'oci-fingerprint': 'test_value_13',
'oci-region': 'phoenix',
'password': 'test_value_15',
'private-key': 'test_value_16',
'region': 'test_value_17',
'resource-group': 'test_value_18',
'resource-url': 'test_value_19',
'secret-key': 'test_value_20',
'server': '192.168.100.21',
'server-port': '22',
'service-account': 'test_value_23',
'status': 'disable',
'subscription-id': 'test_value_25',
'tenant-id': 'test_value_26',
'type': 'aci',
'update-interval': '28',
'use-metadata-iam': 'disable',
'user-id': 'test_value_30',
'username': 'test_value_31',
'vpc-id': 'test_value_32'
}
set_method_mock.assert_called_with('system', 'sdn-connector', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | -3,041,521,662,658,648,000 | 38.993865 | 142 | 0.537301 | false |
ztp-at/RKSV | librksv/test/verification_proxy.py | 1 | 2681 | ###########################################################################
# Copyright 2017 ZT Prentner IT GmbH (www.ztp.at)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###########################################################################
from ..gettext_helper import _
class RKSVVerificationProxyI(object):
def verify(self, fd, keyStore, aesKey, inState, registerIdx, chunksize):
raise NotImplementedError("Please implement this yourself.")
from sys import version_info
if version_info[0] < 3:
import __builtin__
else:
import builtins as __builtin__
from .. import depparser
from .. import key_store
from .. import receipt
from .. import verification_state
from .. import verify
from .. import verify_receipt
class LibRKSVVerificationProxy(RKSVVerificationProxyI):
def __init__(self, pool, nprocs):
self.pool = pool
self.nprocs = nprocs
def verify(self, fd, keyStore, aesKey, inState, registerIdx, chunksize):
# Save the _() function.
trvec = (
__builtin__._ ,
depparser._,
key_store._,
receipt._,
verification_state._,
verify._,
verify_receipt._,
)
# Temporarily disable translations to make sure error
# messages match.
(
__builtin__._ ,
depparser._,
key_store._,
receipt._,
verification_state._,
verify._,
verify_receipt._,
) = [lambda x: x] * len(trvec)
try:
parser = depparser.IncrementalDEPParser.fromFd(fd, True)
outState = verify.verifyParsedDEP(parser, keyStore, aesKey, inState,
registerIdx, self.pool, self.nprocs, chunksize)
finally:
(
__builtin__._ ,
depparser._,
key_store._,
receipt._,
verification_state._,
verify._,
verify_receipt._,
) = trvec
return outState
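# Minimal usage sketch (hypothetical pool, key store and DEP file):
#
#   proxy = LibRKSVVerificationProxy(pool, nprocs=4)
#   with open('dep.json', 'rb') as fd:
#       outState = proxy.verify(fd, keyStore, aesKey, inState,
#                               registerIdx=0, chunksize=100000)
#
# The save/restore shuffle around _() exists so that error strings
# produced inside librksv compare equal regardless of the active locale.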
| agpl-3.0 | 4,877,088,742,678,757,000 | 32.5125 | 80 | 0.568445 | false |
zmwangx/you-get | src/you_get/extractors/qq_egame.py | 2 | 1652 | import re
import json
from ..common import get_content
from ..extractors import VideoExtractor
from ..util import log
from ..util.strings import unescape_html
__all__ = ['qq_egame_download']
class QQEgame(VideoExtractor):
stream_types = [
{'id': 'original', 'video_profile': '0', 'container': 'flv'},
{'id': '900', 'video_profile': '900kb/s', 'container': 'flv'},
{'id': '550', 'video_profile': '550kb/s', 'container': 'flv'}
]
name = 'QQEgame'
def prepare(self, **kwargs):
page = get_content(self.url)
server_data = re.search(r'serverData\s*=\s*({.+?});', page)
if server_data is None:
log.wtf('cannot find server_data')
json_data = json.loads(server_data.group(1))
live_info = json_data['liveInfo']['data']
self.title = '{}_{}'.format(live_info['profileInfo']['nickName'], live_info['videoInfo']['title'])
for existing_stream in live_info['videoInfo']['streamInfos']:
for s in self.__class__.stream_types:
if re.search(r'(\d+)', s['video_profile']).group(1) == existing_stream['bitrate']:
current_stream_id = s['id']
stream_info = dict(src=[unescape_html(existing_stream['playUrl'])])
stream_info['video_profile'] = existing_stream['desc']
stream_info['container'] = s['container']
stream_info['size'] = float('inf')
self.streams[current_stream_id] = stream_info
def qq_egame_download(url, **kwargs):
QQEgame().download_by_url(url, **kwargs)
# url dispatching has been done in qq.py
| mit | -184,731,142,138,083,970 | 39.292683 | 106 | 0.573245 | false |
joefutrelle/domdb | utils.py | 1 | 1859 | def rpad(s,l,pad_string=' '):
return s + (pad_string * (l - len(s)))
def asciitable(dicts,disp_cols=None,none_msg=None,border=True):
"""produce an ASCII formatted columnar table from the dicts"""
dicts = list(dicts)
if not dicts:
if none_msg is not None:
yield none_msg
return
if disp_cols is not None:
cols = disp_cols
else:
# set of all keys in dicts
cols = sorted(list(set(reduce(lambda x,y: x+y, [d.keys() for d in dicts]))))
# compute col widths. initially wide enough for the column label
widths = dict([(col,len(col)) for col in cols])
# now create rows, and in doing so compute max width of each column
for row in list(dicts):
for col in cols:
try:
width = len(str(row[col]))
except KeyError:
width = 0
if width > widths[col]:
widths[col] = width
def bord(line,border_char='|',pad_char=' '):
if border:
return border_char + pad_char + line + pad_char + border_char
else:
return line
# now print rows
spacer = bord('-+-'.join(['-' * widths[col] for col in cols]),'+','-')
if border:
yield spacer
yield bord(' | '.join([rpad(col,widths[col]) for col in cols]),'|')
yield spacer
for row in dicts:
        # Use .get() so rows missing a column render blank instead of raising
        # KeyError, matching the tolerant width pass above.
        yield bord(' | '.join([rpad(str(row.get(col,'')),widths[col]) for col in cols]),'|')
if border:
yield spacer
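# Illustrative usage sketch (added for clarity; `_example_asciitable` is not
# part of the original module):
def _example_asciitable():
    rows = [{'name': 'alpha', 'count': 3}, {'name': 'beta', 'count': 12}]
    for line in asciitable(rows, disp_cols=['name', 'count']):
        print line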
def resultproxy2asciitable(r,empty_message='No rows'):
"""yields an asciitable representation of an SQLAlchemy ResultProxy"""
cols = []
row_proxies = r.fetchall()
rows = []
for r in row_proxies:
if not cols:
cols = r.keys()
rows.append(dict(r.items()))
for line in asciitable(rows,cols,empty_message):
print line
| mit | -3,412,372,783,714,532,000 | 34.075472 | 85 | 0.568047 | false |
drnextgis/QGIS | python/plugins/processing/algs/grass7/Grass7Utils.py | 1 | 16129 | # -*- coding: utf-8 -*-
"""
***************************************************************************
GrassUtils.py
---------------------
Date : February 2015
Copyright : (C) 2014-2015 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import object
__author__ = 'Victor Olaya'
__date__ = 'February 2015'
__copyright__ = '(C) 2014-2015, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import stat
import shutil
import subprocess
import os
from qgis.core import QgsApplication
from qgis.PyQt.QtCore import QCoreApplication
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.ProcessingLog import ProcessingLog
from processing.tools.system import userFolder, isWindows, isMac, tempFolder, mkdir
from processing.tests.TestData import points
class Grass7Utils(object):
GRASS_REGION_XMIN = 'GRASS7_REGION_XMIN'
GRASS_REGION_YMIN = 'GRASS7_REGION_YMIN'
GRASS_REGION_XMAX = 'GRASS7_REGION_XMAX'
GRASS_REGION_YMAX = 'GRASS7_REGION_YMAX'
GRASS_REGION_CELLSIZE = 'GRASS7_REGION_CELLSIZE'
GRASS_FOLDER = 'GRASS7_FOLDER'
GRASS_LOG_COMMANDS = 'GRASS7_LOG_COMMANDS'
GRASS_LOG_CONSOLE = 'GRASS7_LOG_CONSOLE'
sessionRunning = False
sessionLayers = {}
projectionSet = False
isGrass7Installed = False
@staticmethod
def grassBatchJobFilename():
        '''This is used on Linux. This is the batch job that we assign to
        GRASS_BATCH_JOB before invoking GRASS, which then executes it.
        '''
filename = 'grass7_batch_job.sh'
batchfile = os.path.join(userFolder(), filename)
return batchfile
@staticmethod
def grassScriptFilename():
        '''This is used on Windows. We create a script that initializes
        GRASS and then runs the GRASS commands.
        '''
filename = 'grass7_script.bat'
filename = os.path.join(userFolder(), filename)
return filename
@staticmethod
def getGrassVersion():
# FIXME: I do not know if this should be removed or let the user enter it
# or something like that... This is just a temporary thing
return '7.0.0'
@staticmethod
def grassPath():
if not isWindows() and not isMac():
return ''
folder = ProcessingConfig.getSetting(Grass7Utils.GRASS_FOLDER) or ''
if not os.path.exists(folder):
folder = None
if folder is None:
if isWindows():
if "OSGEO4W_ROOT" in os.environ:
testfolder = os.path.join(str(os.environ['OSGEO4W_ROOT']), "apps")
else:
testfolder = str(QgsApplication.prefixPath())
testfolder = os.path.join(testfolder, 'grass')
if os.path.isdir(testfolder):
for subfolder in os.listdir(testfolder):
if subfolder.startswith('grass-7'):
folder = os.path.join(testfolder, subfolder)
break
else:
folder = os.path.join(str(QgsApplication.prefixPath()), 'grass7')
if not os.path.isdir(folder):
folder = '/Applications/GRASS-7.0.app/Contents/MacOS'
return folder or ''
@staticmethod
def grassDescriptionPath():
return os.path.join(os.path.dirname(__file__), 'description')
@staticmethod
def createGrass7Script(commands):
folder = Grass7Utils.grassPath()
script = Grass7Utils.grassScriptFilename()
gisrc = os.path.join(userFolder(), 'processing.gisrc7') # FIXME: use temporary file
# Temporary gisrc file
with open(gisrc, 'w') as output:
location = 'temp_location'
gisdbase = Grass7Utils.grassDataFolder()
output.write('GISDBASE: ' + gisdbase + '\n')
output.write('LOCATION_NAME: ' + location + '\n')
output.write('MAPSET: PERMANENT \n')
output.write('GRASS_GUI: text\n')
with open(script, 'w') as output:
output.write('set HOME=' + os.path.expanduser('~') + '\n')
output.write('set GISRC=' + gisrc + '\n')
output.write('set WINGISBASE=' + folder + '\n')
output.write('set GISBASE=' + folder + '\n')
output.write('set GRASS_PROJSHARE=' + os.path.join(folder, 'share', 'proj') + '\n')
output.write('set GRASS_MESSAGE_FORMAT=plain\n')
# Replacement code for etc/Init.bat
output.write('if "%GRASS_ADDON_PATH%"=="" set PATH=%WINGISBASE%\\bin;%WINGISBASE%\\lib;%PATH%\n')
output.write('if not "%GRASS_ADDON_PATH%"=="" set PATH=%WINGISBASE%\\bin;%WINGISBASE%\\lib;%GRASS_ADDON_PATH%;%PATH%\n')
output.write('\n')
output.write('set GRASS_VERSION=' + Grass7Utils.getGrassVersion() + '\n')
output.write('if not "%LANG%"=="" goto langset\n')
output.write('FOR /F "usebackq delims==" %%i IN (`"%WINGISBASE%\\etc\\winlocale"`) DO @set LANG=%%i\n')
output.write(':langset\n')
output.write('\n')
output.write('set PATHEXT=%PATHEXT%;.PY\n')
            output.write('set PYTHONPATH=%PYTHONPATH%;%WINGISBASE%\\etc\\python;%WINGISBASE%\\etc\\wxpython\n')
output.write('\n')
output.write('g.gisenv.exe set="MAPSET=PERMANENT"\n')
output.write('g.gisenv.exe set="LOCATION=' + location + '"\n')
output.write('g.gisenv.exe set="LOCATION_NAME=' + location + '"\n')
output.write('g.gisenv.exe set="GISDBASE=' + gisdbase + '"\n')
output.write('g.gisenv.exe set="GRASS_GUI=text"\n')
for command in commands:
Grass7Utils.writeCommand(output, command)
output.write('\n')
output.write('exit\n')
@staticmethod
def createGrass7BatchJobFileFromGrass7Commands(commands):
with open(Grass7Utils.grassBatchJobFilename(), 'w') as fout:
for command in commands:
Grass7Utils.writeCommand(fout, command)
fout.write('exit')
@staticmethod
def grassMapsetFolder():
folder = os.path.join(Grass7Utils.grassDataFolder(), 'temp_location')
mkdir(folder)
return folder
@staticmethod
def grassDataFolder():
tempfolder = os.path.join(tempFolder(), 'grassdata')
mkdir(tempfolder)
return tempfolder
@staticmethod
def createTempMapset():
'''Creates a temporary location and mapset(s) for GRASS data
processing. A minimal set of folders and files is created in the
system's default temporary directory. The settings files are
written with sane defaults, so GRASS can do its work. The mapset
projection will be set later, based on the projection of the first
input image or vector
'''
folder = Grass7Utils.grassMapsetFolder()
mkdir(os.path.join(folder, 'PERMANENT'))
mkdir(os.path.join(folder, 'PERMANENT', '.tmp'))
Grass7Utils.writeGrass7Window(os.path.join(folder, 'PERMANENT', 'DEFAULT_WIND'))
with open(os.path.join(folder, 'PERMANENT', 'MYNAME'), 'w') as outfile:
outfile.write(
'QGIS GRASS GIS 7 interface: temporary data processing location.\n')
Grass7Utils.writeGrass7Window(os.path.join(folder, 'PERMANENT', 'WIND'))
mkdir(os.path.join(folder, 'PERMANENT', 'sqlite'))
with open(os.path.join(folder, 'PERMANENT', 'VAR'), 'w') as outfile:
outfile.write('DB_DRIVER: sqlite\n')
outfile.write('DB_DATABASE: $GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db\n')
@staticmethod
def writeGrass7Window(filename):
with open(filename, 'w') as out:
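            # Placeholder 1x1 dummy region; the real region and projection
            # are set later from the first input layer (see createTempMapset).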
out.write('proj: 0\n')
out.write('zone: 0\n')
out.write('north: 1\n')
out.write('south: 0\n')
out.write('east: 1\n')
out.write('west: 0\n')
out.write('cols: 1\n')
out.write('rows: 1\n')
out.write('e-w resol: 1\n')
out.write('n-s resol: 1\n')
out.write('top: 1\n')
out.write('bottom: 0\n')
out.write('cols3: 1\n')
out.write('rows3: 1\n')
out.write('depths: 1\n')
out.write('e-w resol3: 1\n')
out.write('n-s resol3: 1\n')
out.write('t-b resol: 1\n')
@staticmethod
def prepareGrass7Execution(commands):
env = os.environ.copy()
if isWindows():
Grass7Utils.createGrass7Script(commands)
command = ['cmd.exe', '/C ', Grass7Utils.grassScriptFilename()]
else:
gisrc = os.path.join(userFolder(), 'processing.gisrc7')
env['GISRC'] = gisrc
env['GRASS_MESSAGE_FORMAT'] = 'plain'
env['GRASS_BATCH_JOB'] = Grass7Utils.grassBatchJobFilename()
if 'GISBASE' in env:
del env['GISBASE']
Grass7Utils.createGrass7BatchJobFileFromGrass7Commands(commands)
os.chmod(Grass7Utils.grassBatchJobFilename(), stat.S_IEXEC
| stat.S_IREAD | stat.S_IWRITE)
if isMac() and os.path.exists(os.path.join(Grass7Utils.grassPath(), 'grass.sh')):
command = os.path.join(Grass7Utils.grassPath(), 'grass.sh') + ' ' \
+ os.path.join(Grass7Utils.grassMapsetFolder(), 'PERMANENT')
else:
command = 'grass70 ' + os.path.join(Grass7Utils.grassMapsetFolder(), 'PERMANENT')
return command, env
@staticmethod
def executeGrass7(commands, progress, outputCommands=None):
loglines = []
loglines.append(Grass7Utils.tr('GRASS GIS 7 execution console output'))
grassOutDone = False
command, grassenv = Grass7Utils.prepareGrass7Execution(commands)
with subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.DEVNULL,
stderr=subprocess.STDOUT,
universal_newlines=True,
env=grassenv
) as proc:
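            # GRASS reports progress on stdout as 'GRASS_INFO_PERCENT: <n>';
            # translate those lines into progress updates and log the rest.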
for line in iter(proc.stdout.readline, ''):
if 'GRASS_INFO_PERCENT' in line:
try:
progress.setPercentage(int(line[len('GRASS_INFO_PERCENT') + 2:]))
except:
pass
else:
if 'r.out' in line or 'v.out' in line:
grassOutDone = True
loglines.append(line)
progress.setConsoleInfo(line)
# Some GRASS scripts, like r.mapcalculator or r.fillnulls, call
# other GRASS scripts during execution. This may override any
# commands that are still to be executed by the subprocess, which
# are usually the output ones. If that is the case runs the output
# commands again.
if not grassOutDone and outputCommands:
command, grassenv = Grass7Utils.prepareGrass7Execution(outputCommands)
with subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.DEVNULL,
stderr=subprocess.STDOUT,
universal_newlines=True,
env=grassenv
) as proc:
for line in iter(proc.stdout.readline, ''):
if 'GRASS_INFO_PERCENT' in line:
try:
progress.setPercentage(int(
line[len('GRASS_INFO_PERCENT') + 2:]))
except:
pass
else:
loglines.append(line)
progress.setConsoleInfo(line)
if ProcessingConfig.getSetting(Grass7Utils.GRASS_LOG_CONSOLE):
ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines)
# GRASS session is used to hold the layers already exported or
# produced in GRASS between multiple calls to GRASS algorithms.
# This way they don't have to be loaded multiple times and
# following algorithms can use the results of the previous ones.
# Starting a session just involves creating the temp mapset
# structure
@staticmethod
def startGrass7Session():
if not Grass7Utils.sessionRunning:
Grass7Utils.createTempMapset()
Grass7Utils.sessionRunning = True
# End session by removing the temporary GRASS mapset and all
# the layers.
@staticmethod
def endGrass7Session():
shutil.rmtree(Grass7Utils.grassMapsetFolder(), True)
Grass7Utils.sessionRunning = False
Grass7Utils.sessionLayers = {}
Grass7Utils.projectionSet = False
@staticmethod
def getSessionLayers():
return Grass7Utils.sessionLayers
@staticmethod
def addSessionLayers(exportedLayers):
Grass7Utils.sessionLayers = dict(
list(Grass7Utils.sessionLayers.items())
+ list(exportedLayers.items()))
@staticmethod
def checkGrass7IsInstalled(ignorePreviousState=False):
if isWindows():
path = Grass7Utils.grassPath()
if path == '':
return Grass7Utils.tr(
'GRASS GIS 7 folder is not configured. Please configure '
'it before running GRASS GIS 7 algorithms.')
cmdpath = os.path.join(path, 'bin', 'r.out.gdal.exe')
if not os.path.exists(cmdpath):
return Grass7Utils.tr(
'The specified GRASS 7 folder "{}" does not contain '
'a valid set of GRASS 7 modules.\nPlease, go to the '
'Processing settings dialog, and check that the '
'GRASS 7\nfolder is correctly configured'.format(os.path.join(path, 'bin')))
if not ignorePreviousState:
if Grass7Utils.isGrass7Installed:
return
try:
from processing import runalg
result = runalg(
'grass7:v.voronoi',
points(),
False,
False,
None,
-1,
0.0001,
0,
None,
)
if not os.path.exists(result['output']):
return Grass7Utils.tr(
'It seems that GRASS GIS 7 is not correctly installed and '
'configured in your system.\nPlease install it before '
'running GRASS GIS 7 algorithms.')
except:
return Grass7Utils.tr(
'Error while checking GRASS GIS 7 installation. GRASS GIS 7 '
'might not be correctly configured.\n')
Grass7Utils.isGrass7Installed = True
@staticmethod
def tr(string, context=''):
if context == '':
context = 'Grass7Utils'
return QCoreApplication.translate(context, string)
@staticmethod
def writeCommand(output, command):
try:
# Python 2
output.write(command.encode('utf8') + '\n')
except TypeError:
# Python 3
output.write(command + '\n')
| gpl-2.0 | -5,789,570,239,604,929,000 | 39.627204 | 132 | 0.561473 | false |
leki75/ansible | lib/ansible/modules/cloud/rackspace/rax_cbs_attachments.py | 14 | 6902 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_cbs_attachments
short_description: Manipulate Rackspace Cloud Block Storage Volume Attachments
description:
- Manipulate Rackspace Cloud Block Storage Volume Attachments
version_added: 1.6
options:
device:
description:
- The device path to attach the volume to, e.g. /dev/xvde.
- Before 2.4 this was a required field. Now it can be left to null to auto assign the device name.
default: null
required: false
volume:
description:
- Name or id of the volume to attach/detach
default: null
required: true
server:
description:
- Name or id of the server to attach/detach
default: null
required: true
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
required: true
wait:
description:
- wait for the volume to be in 'in-use'/'available' state before returning
default: "no"
choices:
- "yes"
- "no"
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Attach a Block Storage Volume
gather_facts: False
hosts: local
connection: local
tasks:
- name: Storage volume attach request
local_action:
module: rax_cbs_attachments
credentials: ~/.raxpub
volume: my-volume
server: my-server
device: /dev/xvdd
region: DFW
wait: yes
state: present
register: my_volume
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_block_storage_attachments(module, state, volume, server, device,
wait, wait_timeout):
cbs = pyrax.cloud_blockstorage
cs = pyrax.cloudservers
if cbs is None or cs is None:
module.fail_json(msg='Failed to instantiate client. This '
'typically indicates an invalid region or an '
'incorrectly capitalized region name.')
changed = False
instance = {}
volume = rax_find_volume(module, pyrax, volume)
if not volume:
module.fail_json(msg='No matching storage volumes were found')
if state == 'present':
server = rax_find_server(module, pyrax, server)
if (volume.attachments and
volume.attachments[0]['server_id'] == server.id):
changed = False
elif volume.attachments:
module.fail_json(msg='Volume is attached to another server')
else:
try:
volume.attach_to_instance(server, mountpoint=device)
changed = True
except Exception as e:
module.fail_json(msg='%s' % e.message)
volume.get()
for key, value in vars(volume).items():
if (isinstance(value, NON_CALLABLES) and
not key.startswith('_')):
instance[key] = value
result = dict(changed=changed)
if volume.status == 'error':
result['msg'] = '%s failed to build' % volume.id
elif wait:
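        # Poll every 5 seconds until the volume reports 'in-use' or
        # wait_timeout elapses.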
attempts = wait_timeout / 5
pyrax.utils.wait_until(volume, 'status', 'in-use',
interval=5, attempts=attempts)
volume.get()
result['volume'] = rax_to_dict(volume)
if 'msg' in result:
module.fail_json(**result)
else:
module.exit_json(**result)
elif state == 'absent':
server = rax_find_server(module, pyrax, server)
if (volume.attachments and
volume.attachments[0]['server_id'] == server.id):
try:
volume.detach()
if wait:
pyrax.utils.wait_until(volume, 'status', 'available',
interval=3, attempts=0,
verbose=False)
changed = True
except Exception as e:
module.fail_json(msg='%s' % e.message)
volume.get()
changed = True
elif volume.attachments:
module.fail_json(msg='Volume is attached to another server')
result = dict(changed=changed, volume=rax_to_dict(volume))
if volume.status == 'error':
result['msg'] = '%s failed to build' % volume.id
if 'msg' in result:
module.fail_json(**result)
else:
module.exit_json(**result)
module.exit_json(changed=changed, volume=instance)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
device=dict(required=False),
volume=dict(required=True),
server=dict(required=True),
state=dict(default='present', choices=['present', 'absent']),
wait=dict(type='bool', default=False),
wait_timeout=dict(type='int', default=300)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together()
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
device = module.params.get('device')
volume = module.params.get('volume')
server = module.params.get('server')
state = module.params.get('state')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
setup_rax_module(module, pyrax)
cloud_block_storage_attachments(module, state, volume, server, device,
wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
if __name__ == '__main__':
main()
| gpl-3.0 | -1,151,460,400,165,906,600 | 29.139738 | 104 | 0.597073 | false |
philotas/enigma2 | lib/python/Components/Converter/ServicePosition.py | 13 | 21710 | from Converter import Converter
from Poll import Poll
from enigma import iPlayableService
from Components.Element import cached, ElementError
from Components.config import config
class ServicePosition(Poll, Converter, object):
TYPE_LENGTH = 0
TYPE_POSITION = 1
TYPE_REMAINING = 2
TYPE_GAUGE = 3
TYPE_SUMMARY = 4
TYPE_VFD_LENGTH = 5
TYPE_VFD_POSITION = 6
TYPE_VFD_REMAINING = 7
TYPE_VFD_GAUGE = 8
TYPE_VFD_SUMMARY = 9
def __init__(self, type):
Poll.__init__(self)
Converter.__init__(self, type)
args = type.split(',')
type = args.pop(0)
self.negate = 'Negate' in args
self.detailed = 'Detailed' in args
self.showHours = 'ShowHours' in args
self.showNoSeconds = 'ShowNoSeconds' in args
self.OnlyMinute = 'OnlyMinute' in args
if type == "Length":
self.type = self.TYPE_LENGTH
elif type == "Position":
self.type = self.TYPE_POSITION
elif type == "Remaining":
self.type = self.TYPE_REMAINING
elif type == "Gauge":
self.type = self.TYPE_GAUGE
elif type == "Summary":
self.type = self.TYPE_SUMMARY
elif type == "VFDLength":
self.type = self.TYPE_VFD_LENGTH
elif type == "VFDPosition":
self.type = self.TYPE_VFD_POSITION
elif type == "VFDRemaining":
self.type = self.TYPE_VFD_REMAINING
elif type == "VFDGauge":
self.type = self.TYPE_VFD_GAUGE
elif type == "VFDSummary":
self.type = self.TYPE_VFD_SUMMARY
else:
raise ElementError("type must be {Length|Position|Remaining|Gauge|Summary} with optional arguments {Negate|Detailed|ShowHours|ShowNoSeconds} for ServicePosition converter")
if self.detailed:
self.poll_interval = 100
elif self.type == self.TYPE_LENGTH or self.type == self.TYPE_VFD_LENGTH:
self.poll_interval = 2000
else:
self.poll_interval = 500
self.poll_enabled = True
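	# Illustrative skin usage (hedged example; not taken from this file):
	#   <widget source="session.CurrentService" render="Label">
	#     <convert type="ServicePosition">Remaining,ShowHours</convert>
	#   </widget>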
def getSeek(self):
s = self.source.service
return s and s.seek()
@cached
def getPosition(self):
seek = self.getSeek()
if seek is None:
return None
pos = seek.getPlayPosition()
if pos[0]:
return 0
return pos[1]
@cached
def getLength(self):
seek = self.getSeek()
if seek is None:
return None
length = seek.getLength()
if length[0]:
return 0
return length[1]
@cached
def getCutlist(self):
service = self.source.service
cue = service and service.cueSheet()
return cue and cue.getCutList()
@cached
def getText(self):
seek = self.getSeek()
if seek is None:
return ""
		if self.type == self.TYPE_SUMMARY or self.type == self.TYPE_VFD_SUMMARY:
s = self.position / 90000
e = (self.length / 90000) - s
return "%02d:%02d +%2dm" % (s/60, s%60, e/60)
l = self.length
p = self.position
r = self.length - self.position # Remaining
if l < 0:
return ""
if not self.detailed:
l /= 90000
p /= 90000
r /= 90000
if self.negate: l = -l
if self.negate: p = -p
if self.negate: r = -r
if l >= 0:
sign_l = ""
else:
l = -l
sign_l = "-"
if p >= 0:
sign_p = ""
else:
p = -p
sign_p = "-"
if r >= 0:
sign_r = ""
else:
r = -r
sign_r = "-"
if self.type < 5:
if config.usage.elapsed_time_positive_osd.value:
sign_p = "+"
sign_r = "-"
sign_l = ""
else:
sign_p = "-"
sign_r = "+"
sign_l = ""
if config.usage.swap_media_time_display_on_osd.value == "1": # Mins
if self.type == self.TYPE_LENGTH:
return ngettext("%d Min", "%d Mins", (l/60)) % (l/60)
elif self.type == self.TYPE_POSITION:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif self.type == self.TYPE_REMAINING:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_osd.value == "2" or config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_media_time_display_on_osd.value == "2": # Mins Secs
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d" % (l/60, l%60)
elif self.type == self.TYPE_POSITION:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/60, p%60) + sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/60, r%60) + sign_p + "%d:%02d" % (p/60, p%60)
else:
return sign_r + "%d:%02d" % (r/60, r%60)
elif self.type == self.TYPE_REMAINING:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2" or config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_media_time_display_on_osd.value == "3": # Hours Mins
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_POSITION:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif self.type == self.TYPE_REMAINING:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "2" or config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_media_time_display_on_osd.value == "4": # Hours Mins Secs
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.TYPE_POSITION:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif self.type == self.TYPE_REMAINING:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2" or config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_media_time_display_on_osd.value == "5": # Percentage
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_POSITION:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif self.type == self.TYPE_REMAINING:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "2" or config.usage.swap_time_remaining_on_osd.value == "3": # Elapsed & Remaining
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else: # Skin Setting
if not self.detailed:
if self.showHours:
if self.showNoSeconds:
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_POSITION:
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif self.type == self.TYPE_REMAINING:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.TYPE_POSITION:
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif self.type == self.TYPE_REMAINING:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
if self.showNoSeconds:
if self.type == self.TYPE_LENGTH:
return ngettext("%d Min", "%d Mins", (l/60)) % (l/60)
elif self.type == self.TYPE_POSITION:
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif self.type == self.TYPE_REMAINING and self.OnlyMinute:
return ngettext("%d", "%d", (r/60)) % (r/60)
elif self.type == self.TYPE_REMAINING:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
else:
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d" % (l/60, l%60)
elif self.type == self.TYPE_POSITION:
return sign_p + "%d:%02d" % (p/60, p%60)
elif self.type == self.TYPE_REMAINING:
return sign_r + "%d:%02d" % (r/60, r%60)
else:
if self.showHours:
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d:%02d:%03d" % ((l/3600/90000), (l/90000)%3600/60, (l/90000)%60, (l%90000)/90)
						elif self.type == self.TYPE_POSITION:
							return sign_p + "%d:%02d:%02d:%03d" % ((p/3600/90000), (p/90000)%3600/60, (p/90000)%60, (p%90000)/90)
						elif self.type == self.TYPE_REMAINING:
							return sign_r + "%d:%02d:%02d:%03d" % ((r/3600/90000), (r/90000)%3600/60, (r/90000)%60, (r%90000)/90)
else:
if self.type == self.TYPE_LENGTH:
return sign_l + "%d:%02d:%03d" % ((l/60/90000), (l/90000)%60, (l%90000)/90)
elif self.type == self.TYPE_POSITION:
return sign_p + "%d:%02d:%03d" % ((p/60/90000), (p/90000)%60, (p%90000)/90)
elif self.type == self.TYPE_REMAINING:
return sign_r + "%d:%02d:%03d" % ((r/60/90000), (r/90000)%60, (r%90000)/90)
else:
if config.usage.elapsed_time_positive_vfd.value:
sign_p = "+"
sign_r = "-"
else:
sign_p = "-"
sign_r = "+"
if config.usage.swap_media_time_display_on_vfd.value == "1": # Mins
if self.type == self.TYPE_VFD_LENGTH:
return ngettext("%d Min", "%d Mins", (l/60)) % (l/60)
elif self.type == self.TYPE_VFD_POSITION:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif self.type == self.TYPE_VFD_REMAINING:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2" or config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_media_time_display_on_vfd.value == "2": # Mins Secs
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d" % (l/60, l%60)
elif self.type == self.TYPE_VFD_POSITION:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/60, p%60) + sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/60, r%60) + sign_p + "%d:%02d" % (p/60, p%60)
else:
return sign_r + "%d:%02d" % (r/60, r%60)
elif self.type == self.TYPE_VFD_REMAINING:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2" or config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_media_time_display_on_vfd.value == "3": # Hours Mins
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_VFD_POSITION:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif self.type == self.TYPE_VFD_REMAINING:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2" or config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_media_time_display_on_vfd.value == "4": # Hours Mins Secs
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.TYPE_VFD_POSITION:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif self.type == self.TYPE_VFD_REMAINING:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2" or config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return ""
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_media_time_display_on_vfd.value == "5": # Percentage
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_VFD_POSITION:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif self.type == self.TYPE_VFD_REMAINING:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "2" or config.usage.swap_time_remaining_on_vfd.value == "3": # Elapsed & Remaining
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else: # Skin Setting
if not self.detailed:
if self.showHours:
if self.showNoSeconds:
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.TYPE_VFD_POSITION:
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
							elif self.type == self.TYPE_VFD_REMAINING:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.TYPE_VFD_POSITION:
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
							elif self.type == self.TYPE_VFD_REMAINING:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
if self.showNoSeconds:
if self.type == self.TYPE_VFD_LENGTH:
return ngettext("%d Min", "%d Mins", (l/60)) % (l/60)
elif self.type == self.TYPE_VFD_POSITION:
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif self.type == self.TYPE_VFD_REMAINING:
return sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
else:
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d" % (l/60, l%60)
elif self.type == self.TYPE_VFD_POSITION:
return sign_p + "%d:%02d" % (p/60, p%60)
							elif self.type == self.TYPE_VFD_REMAINING:
return sign_r + "%d:%02d" % (r/60, r%60)
else:
if self.showHours:
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d:%02d:%03d" % ((l/3600/90000), (l/90000)%3600/60, (l/90000)%60, (l%90000)/90)
						elif self.type == self.TYPE_VFD_POSITION:
							return sign_p + "%d:%02d:%02d:%03d" % ((p/3600/90000), (p/90000)%3600/60, (p/90000)%60, (p%90000)/90)
						elif self.type == self.TYPE_VFD_REMAINING:
							return sign_r + "%d:%02d:%02d:%03d" % ((r/3600/90000), (r/90000)%3600/60, (r/90000)%60, (r%90000)/90)
else:
if self.type == self.TYPE_VFD_LENGTH:
return sign_l + "%d:%02d:%03d" % ((l/60/90000), (l/90000)%60, (l%90000)/90)
elif self.type == self.TYPE_VFD_POSITION:
return sign_p + "%d:%02d:%03d" % ((p/60/90000), (p/90000)%60, (p%90000)/90)
						elif self.type == self.TYPE_VFD_REMAINING:
return sign_r + "%d:%02d:%03d" % ((r/60/90000), (r/90000)%60, (r%90000)/90)
# range/value are for the Progress renderer
range = 10000
@cached
def getValue(self):
pos = self.position
len = self.length
if pos is None or len is None or len <= 0:
return None
return pos * 10000 / len
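	# e.g. one quarter into playback yields 2500 on the renderer's 0..10000 range.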
position = property(getPosition)
length = property(getLength)
cutlist = property(getCutlist)
text = property(getText)
value = property(getValue)
def changed(self, what):
cutlist_refresh = what[0] != self.CHANGED_SPECIFIC or what[1] in (iPlayableService.evCuesheetChanged,)
time_refresh = what[0] == self.CHANGED_POLL or what[0] == self.CHANGED_SPECIFIC and what[1] in (iPlayableService.evCuesheetChanged,)
if cutlist_refresh:
if self.type == self.TYPE_GAUGE:
self.downstream_elements.cutlist_changed()
if time_refresh:
self.downstream_elements.changed(what)
| gpl-2.0 | -949,953,025,196,225,400 | 41.319688 | 175 | 0.580516 | false |
DARKPOP/external_chromium_org | chrome/tools/webforms_extractor.py | 185 | 10187 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts registration forms from the corresponding HTML files.
Used for extracting forms within HTML files. This script is used in
conjunction with the webforms_aggregator.py script, which aggregates web pages
with fillable forms (i.e. registration forms).
The purpose of this script is to strip out all non-form elements that may be
causing parsing errors and timeout issues when running browser_tests.
This script extracts all forms from a HTML file.
If there are multiple forms per downloaded site, multiple files are created
for each form.
Used as a standalone script but assumes that it is run from the directory in
which it is checked into.
Usage: forms_extractor.py [options]
Options:
-l LOG_LEVEL, --log_level=LOG_LEVEL,
LOG_LEVEL: debug, info, warning or error [default: error]
-j, --js extracts javascript elements from web form.
-h, --help show this help message and exit
"""
import glob
import logging
from optparse import OptionParser
import os
import re
import sys
class FormsExtractor(object):
"""Extracts HTML files, leaving only registration forms from the HTML file."""
_HTML_FILES_PATTERN = r'*.html'
_HTML_FILE_PREFIX = r'grabber-'
_FORM_FILE_PREFIX = r'grabber-stripped-'
_REGISTRATION_PAGES_DIR = os.path.join(os.pardir, 'test', 'data', 'autofill',
'heuristics', 'input')
_EXTRACTED_FORMS_DIR = os.path.join(os.pardir, 'test', 'data', 'autofill',
'heuristics', 'input')
logger = logging.getLogger(__name__)
log_handlers = {'StreamHandler': None}
# This pattern is used for retrieving the form location comment located at the
# top of each downloaded HTML file indicating where the form originated from.
_RE_FORM_LOCATION_PATTERN = re.compile(
ur"""
<!--Form\s{1}Location: # Starting of form location comment.
.*? # Any characters (non-greedy).
--> # Ending of the form comment.
""", re.U | re.S | re.I | re.X)
# This pattern is used for removing all script code.
_RE_SCRIPT_PATTERN = re.compile(
ur"""
<script # A new opening '<script' tag.
\b # The end of the word 'script'.
.*? # Any characters (non-greedy).
> # Ending of the (opening) tag: '>'.
.*? # Any characters (non-greedy) between the tags.
</script\s*> # The '</script>' closing tag.
""", re.U | re.S | re.I | re.X)
# This pattern is used for removing all href js code.
_RE_HREF_JS_PATTERN = re.compile(
ur"""
\bhref # The word href and its beginning.
\s*=\s* # The '=' with all whitespace before and after it.
(?P<quote>[\'\"]) # A single or double quote which is captured.
\s*javascript\s*: # The word 'javascript:' with any whitespace possible.
.*? # Any characters (non-greedy) between the quotes.
\1 # The previously captured single or double quote.
""", re.U | re.S | re.I | re.X)
_RE_EVENT_EXPR = (
ur"""
\b # The beginning of a new word.
on\w+? # All words starting with 'on' (non-greedy)
# example: |onmouseover|.
\s*=\s* # The '=' with all whitespace before and after it.
(?P<quote>[\'\"]) # A captured single or double quote.
.*? # Any characters (non-greedy) between the quotes.
\1 # The previously captured single or double quote.
""")
# This pattern is used for removing code with js events, such as |onload|.
# By adding the leading |ur'<[^<>]*?'| and the trailing |'ur'[^<>]*?>'| the
# pattern matches to strings such as '<tr class="nav"
# onmouseover="mOvr1(this);" onmouseout="mOut1(this);">'
_RE_TAG_WITH_EVENTS_PATTERN = re.compile(
ur"""
< # Matches character '<'.
[^<>]*? # Matches any characters except '<' and '>' (non-greedy).""" +
_RE_EVENT_EXPR +
ur"""
[^<>]*? # Matches any characters except '<' and '>' (non-greedy).
> # Matches character '>'.
""", re.U | re.S | re.I | re.X)
# Adds whitespace chars at the end of the matched event. Also match trailing
# whitespaces for JS events. Do not match leading whitespace.
# For example: |< /form>| is invalid HTML and does not exist but |</form >| is
# considered valid HTML.
_RE_EVENT_PATTERN = re.compile(
_RE_EVENT_EXPR + ur'\s*', re.U | re.S | re.I | re.X)
# This pattern is used for finding form elements.
_RE_FORM_PATTERN = re.compile(
ur"""
<form # A new opening '<form' tag.
\b # The end of the word 'form'.
.*? # Any characters (non-greedy).
> # Ending of the (opening) tag: '>'.
.*? # Any characters (non-greedy) between the tags.
</form\s*> # The '</form>' closing tag.
""", re.U | re.S | re.I | re.X)
def __init__(self, input_dir=_REGISTRATION_PAGES_DIR,
output_dir=_EXTRACTED_FORMS_DIR, logging_level=None):
"""Creates a FormsExtractor object.
Args:
input_dir: the directory of HTML files.
output_dir: the directory where the registration form files will be
saved.
logging_level: verbosity level, default is None.
Raises:
IOError exception if input directory doesn't exist.
"""
if logging_level:
if not self.log_handlers['StreamHandler']:
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
self.log_handlers['StreamHandler'] = console
self.logger.addHandler(console)
self.logger.setLevel(logging_level)
else:
if self.log_handlers['StreamHandler']:
self.logger.removeHandler(self.log_handlers['StreamHandler'])
self.log_handlers['StreamHandler'] = None
self._input_dir = input_dir
self._output_dir = output_dir
if not os.path.isdir(self._input_dir):
error_msg = 'Directory "%s" doesn\'t exist.' % self._input_dir
self.logger.error('Error: %s', error_msg)
raise IOError(error_msg)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
self._form_location_comment = ''
def _SubstituteAllEvents(self, matchobj):
"""Remove all js events that are present as attributes within a tag.
Args:
matchobj: A regexp |re.MatchObject| containing text that has at least one
event. Example: |<tr class="nav" onmouseover="mOvr1(this);"
onmouseout="mOut1(this);">|.
Returns:
The text containing the tag with all the attributes except for the tags
with events. Example: |<tr class="nav">|.
"""
tag_with_all_attrs = matchobj.group(0)
return self._RE_EVENT_PATTERN.sub('', tag_with_all_attrs)
def Extract(self, strip_js_only):
"""Extracts and saves the extracted registration forms.
Iterates through all the HTML files.
Args:
strip_js_only: If True, only Javascript is stripped from the HTML content.
Otherwise, all non-form elements are stripped.
"""
pathname_pattern = os.path.join(self._input_dir, self._HTML_FILES_PATTERN)
html_files = [f for f in glob.glob(pathname_pattern) if os.path.isfile(f)]
for filename in html_files:
self.logger.info('Stripping file "%s" ...', filename)
with open(filename, 'U') as f:
html_content = self._RE_TAG_WITH_EVENTS_PATTERN.sub(
self._SubstituteAllEvents,
self._RE_HREF_JS_PATTERN.sub(
'', self._RE_SCRIPT_PATTERN.sub('', f.read())))
form_filename = os.path.split(filename)[1] # Path dropped.
form_filename = form_filename.replace(self._HTML_FILE_PREFIX, '', 1)
(form_filename, extension) = os.path.splitext(form_filename)
form_filename = (self._FORM_FILE_PREFIX + form_filename +
'%s' + extension)
form_filename = os.path.join(self._output_dir, form_filename)
if strip_js_only:
form_filename = form_filename % ''
try:
with open(form_filename, 'w') as f:
f.write(html_content)
except IOError as e:
self.logger.error('Error: %s', e)
continue
else: # Remove all non form elements.
match = self._RE_FORM_LOCATION_PATTERN.search(html_content)
if match:
form_location_comment = match.group() + os.linesep
else:
form_location_comment = ''
forms_iterator = self._RE_FORM_PATTERN.finditer(html_content)
for form_number, form_match in enumerate(forms_iterator, start=1):
form_content = form_match.group()
numbered_form_filename = form_filename % form_number
try:
with open(numbered_form_filename, 'w') as f:
f.write(form_location_comment)
f.write(form_content)
except IOError as e:
self.logger.error('Error: %s', e)
continue
self.logger.info('\tFile "%s" extracted SUCCESSFULLY!', filename)
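# Illustrative standalone use (hedged; mirrors what main() below does):
#   extractor = FormsExtractor(logging_level=logging.DEBUG)
#   extractor.Extract(strip_js_only=False)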
def main():
parser = OptionParser()
parser.add_option(
'-l', '--log_level', metavar='LOG_LEVEL', default='error',
help='LOG_LEVEL: debug, info, warning or error [default: %default]')
parser.add_option(
'-j', '--js', dest='js', action='store_true', default=False,
help='Removes all javascript elements [default: %default]')
(options, args) = parser.parse_args()
options.log_level = options.log_level.upper()
if options.log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR']:
print 'Wrong log_level argument.'
parser.print_help()
return 1
options.log_level = getattr(logging, options.log_level)
extractor = FormsExtractor(logging_level=options.log_level)
extractor.Extract(options.js)
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 4,252,375,787,807,320,600 | 39.106299 | 80 | 0.60695 | false |
pyGrowler/Growler | tests/utils.py | 2 | 1128 | #
# tests/utils
#
"""
Useful functions for all tests
"""
import asyncio
import pytest
from growler.aio.http_protocol import GrowlerHTTPProtocol
import growler
def random_port():
from random import randint
return randint(1024, 2**16)
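# Hypothetical stand-in (assumption; TestProtocol is not defined or imported
# in this file): a zero-argument protocol factory so setup_test_server() can
# run standalone. Mirrors setup_http_server() below, which builds a
# GrowlerHTTPProtocol around a growler.App().
class TestProtocol(GrowlerHTTPProtocol):
    def __init__(self):
        super().__init__(growler.App())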
@asyncio.coroutine
def setup_test_server(unused_tcp_port, event_loop):
"""
Sets up a GrowlerProtocol server for testing
"""
# proto = growler.protocol.GrowlerProtocol
proto = TestProtocol
server = yield from event_loop.create_server(proto, '127.0.0.1', unused_tcp_port)
return server, unused_tcp_port
@asyncio.coroutine
def setup_http_server(loop, port):
"""
Sets up a GrowlerHTTPProtocol server for testing
"""
# proto = growler.protocol.GrowlerHTTPProtocol
app = growler.App()
def proto():
return GrowlerHTTPProtocol(app)
return (yield from loop.create_server(proto, '127.0.0.1', port))
def teardown_server(server, loop=None):
    """
    'Generic' tear down a server and wait on the loop for everything to close.
    """
    # Resolve the default at call time; a default argument of
    # asyncio.get_event_loop() would be evaluated once at import.
    if loop is None:
        loop = asyncio.get_event_loop()
    server.close()
    loop.run_until_complete(server.wait_closed())
| apache-2.0 | 2,447,465,477,460,241,000 | 22.5 | 85 | 0.692376 | false |
nirzari18/Query-Analysis-Application-on-Google-App-Engine | lib/simplejson/decoder.py | 132 | 14721 | """Implementation of JSONDecoder
"""
from __future__ import absolute_import
import re
import sys
import struct
from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr
from .scanner import make_scanner, JSONDecodeError
def _import_c_scanstring():
try:
from ._speedups import scanstring
return scanstring
except ImportError:
return None
c_scanstring = _import_c_scanstring()
# NOTE (3.1.0): JSONDecodeError may still be imported from this module for
# compatibility, but it was never in the __all__
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = fromhex('7FF80000000000007FF0000000000000')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u('"'), '\\': u('\u005c'), '/': u('/'),
'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'),
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
_b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join,
_PY3=PY3, _maxunicode=sys.maxunicode):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
end = chunk.end()
content, terminator = chunk.groups()
# Content is contains zero or more unescaped string characters
if content:
if not _PY3 and not isinstance(content, text_type):
content = text_type(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
msg = "Invalid control character %r at"
raise JSONDecodeError(msg, s, end)
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
msg = "Invalid \\X escape sequence %r"
raise JSONDecodeError(msg, s, end)
end += 1
else:
# Unicode escape sequence
msg = "Invalid \\uXXXX escape sequence"
esc = s[end + 1:end + 5]
escX = esc[1:2]
if len(esc) != 4 or escX == 'x' or escX == 'X':
raise JSONDecodeError(msg, s, end - 1)
try:
uni = int(esc, 16)
except ValueError:
raise JSONDecodeError(msg, s, end - 1)
end += 5
# Check for surrogate pair on UCS-4 systems
# Note that this will join high/low surrogate pairs
# but will also pass unpaired surrogates through
if (_maxunicode > 65535 and
uni & 0xfc00 == 0xd800 and
s[end:end + 2] == '\\u'):
esc2 = s[end + 2:end + 6]
escX = esc2[1:2]
if len(esc2) == 4 and not (escX == 'x' or escX == 'X'):
try:
uni2 = int(esc2, 16)
except ValueError:
raise JSONDecodeError(msg, s, end)
if uni2 & 0xfc00 == 0xdc00:
uni = 0x10000 + (((uni - 0xd800) << 10) |
(uni2 - 0xdc00))
end += 6
char = unichr(uni)
# Append the unescaped character
_append(char)
return _join(chunks), end
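# Behaviour sketch (hedged; traced from the logic above):
#   py_scanstring('"abc"', 1)    -> (u'abc', 5)
#   py_scanstring('"a\\nb"', 1)  -> (u'a\nb', 6)   # JSON escape decoded
# `end` is the index one past the closing quote.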
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject(state, encoding, strict, scan_once, object_hook,
object_pairs_hook, memo=None,
_w=WHITESPACE.match, _ws=WHITESPACE_STR):
(s, end) = state
# Backwards compatibility
if memo is None:
memo = {}
memo_get = memo.setdefault
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end + 1
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError(
"Expecting property name enclosed in double quotes",
s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
key = memo_get(key, key)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting ':' delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
value, end = scan_once(s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting ',' delimiter or '}'", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError(
"Expecting property name enclosed in double quotes",
s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray(state, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
(s, end) = state
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
elif nextchar == '':
raise JSONDecodeError("Expecting value or ']'", s, end)
_append = values.append
while True:
value, end = scan_once(s, end)
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting ',' delimiter or ']'", s, end - 1)
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
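# Both parsers receive a (string, index) state positioned just past the
# opening '{' / '[' and return (value, index_past_closing_bracket); e.g.
# (hedged) JSONArray(('[1, 2]', 1), scan_once) -> ([1, 2], 6).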
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | str, unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True,
object_pairs_hook=None):
"""
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
        Note that currently only encodings that are a superset of ASCII work;
        strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
*strict* controls the parser's behavior when it encounters an
invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors; if
        ``False``, then control characters will be allowed in strings.
"""
if encoding is None:
encoding = DEFAULT_ENCODING
self.encoding = encoding
self.object_hook = object_hook
self.object_pairs_hook = object_pairs_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.memo = {}
self.scan_once = make_scanner(self)
def decode(self, s, _w=WHITESPACE.match, _PY3=PY3):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
if _PY3 and isinstance(s, binary_type):
s = s.decode(self.encoding)
obj, end = self.raw_decode(s)
end = _w(s, end).end()
if end != len(s):
raise JSONDecodeError("Extra data", s, end, len(s))
return obj
def raw_decode(self, s, idx=0, _w=WHITESPACE.match, _PY3=PY3):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
beginning with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
Optionally, ``idx`` can be used to specify an offset in ``s`` where
the JSON document begins.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
if idx < 0:
# Ensure that raw_decode bails on negative indexes, the regex
# would otherwise mask this behavior. #98
raise JSONDecodeError('Expecting value', s, idx)
if _PY3 and not isinstance(s, text_type):
raise TypeError("Input string must be text, not bytes")
# strip UTF-8 bom
if len(s) > idx:
ord0 = ord(s[idx])
if ord0 == 0xfeff:
idx += 1
elif ord0 == 0xef and s[idx:idx + 3] == '\xef\xbb\xbf':
idx += 3
return self.scan_once(s, idx=_w(s, idx).end())
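# Illustrative usage sketch (not from the original module). It assumes the
# usual simplejson-style scanner context defined above; the input strings
# below are made up for demonstration.
#
#     decoder = JSONDecoder()
#     decoder.decode('{"a": [1, 2.5, null]}')        # -> {'a': [1, 2.5, None]}
#     decoder.raw_decode('{"a": 1} trailing')        # -> ({'a': 1}, 8)
#     # raw_decode returns the parsed object plus the index where the JSON
#     # document ended, so s[8:] here is ' trailing'.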
| apache-2.0 | -8,044,468,768,657,414,000 | 35.8025 | 79 | 0.532233 | false |
classcat/cctf | cctf/layers/normalization.py | 1 | 5969 | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import tensorflow as tf
from tensorflow.python.training import moving_averages
# masao
import cctf
#import tflearn
from .. import utils
from .. import variables as vs
def batch_normalization(incoming, beta=0.0, gamma=1.0, epsilon=1e-5,
decay=0.9, stddev=0.002, trainable=True,
restore=True, reuse=False, scope=None,
name="BatchNormalization"):
""" Batch Normalization.
Normalize activations of the previous layer at each batch.
Arguments:
incoming: `Tensor`. Incoming Tensor.
beta: `float`. Default: 0.0.
gamma: `float`. Default: 1.0.
        epsilon: `float`. Default: 1e-5.
decay: `float`. Default: 0.9.
stddev: `float`. Standard deviation for weights initialization.
trainable: `bool`. If True, weights will be trainable.
restore: `bool`. If True, this layer weights will be restored when
loading a model.
reuse: `bool`. If True and 'scope' is provided, this layer variables
will be reused (shared).
scope: `str`. Define this layer scope (optional). A scope can be
used to share variables between layers. Note that scope will
override name.
name: `str`. A name for this layer (optional).
References:
Batch Normalization: Accelerating Deep Network Training by Reducing
        Internal Covariate Shift. Sergey Ioffe, Christian Szegedy. 2015.
Links:
[http://arxiv.org/pdf/1502.03167v3.pdf](http://arxiv.org/pdf/1502.03167v3.pdf)
"""
input_shape = utils.get_incoming_shape(incoming)
input_ndim = len(input_shape)
gamma_init = tf.random_normal_initializer(mean=gamma, stddev=stddev)
with tf.variable_op_scope([incoming], scope, name, reuse=reuse) as scope:
name = scope.name
beta = vs.variable('beta', shape=[input_shape[-1]],
initializer=tf.constant_initializer(beta),
trainable=trainable, restore=restore)
gamma = vs.variable('gamma', shape=[input_shape[-1]],
initializer=gamma_init, trainable=trainable,
restore=restore)
# Track per layer variables
tf.add_to_collection(tf.GraphKeys.LAYER_VARIABLES + '/' + name, beta)
tf.add_to_collection(tf.GraphKeys.LAYER_VARIABLES + '/' + name, gamma)
if not restore:
tf.add_to_collection(tf.GraphKeys.EXCL_RESTORE_VARS, beta)
tf.add_to_collection(tf.GraphKeys.EXCL_RESTORE_VARS, gamma)
axis = list(range(input_ndim - 1))
moving_mean = vs.variable('moving_mean',
input_shape[-1:],
initializer=tf.zeros_initializer,
trainable=False,
restore=restore)
moving_variance = vs.variable('moving_variance',
input_shape[-1:],
initializer=tf.ones_initializer,
trainable=False,
restore=restore)
# Define a function to update mean and variance
def update_mean_var():
mean, variance = tf.nn.moments(incoming, axis)
update_moving_mean = moving_averages.assign_moving_average(
moving_mean, mean, decay)
update_moving_variance = moving_averages.assign_moving_average(
moving_variance, variance, decay)
with tf.control_dependencies(
[update_moving_mean, update_moving_variance]):
return tf.identity(mean), tf.identity(variance)
# Retrieve variable managing training mode
        is_training = cctf.get_training_mode()  # this fork imports cctf in place of tflearn (see imports above)
mean, var = tf.python.control_flow_ops.cond(
is_training, update_mean_var, lambda: (moving_mean, moving_variance))
try:
inference = tf.nn.batch_normalization(
incoming, mean, var, beta, gamma, epsilon)
inference.set_shape(input_shape)
# Fix for old Tensorflow
except Exception as e:
inference = tf.nn.batch_norm_with_global_normalization(
incoming, mean, var, beta, gamma, epsilon,
scale_after_normalization=True,
)
inference.set_shape(input_shape)
# Add attributes for easy access
inference.scope = scope
inference.beta = beta
inference.gamma = gamma
# Track output tensor.
tf.add_to_collection(tf.GraphKeys.LAYER_TENSOR + '/' + name, inference)
return inference
def local_response_normalization(incoming, depth_radius=5, bias=1.0,
alpha=0.0001, beta=0.75,
name="LocalResponseNormalization"):
""" Local Response Normalization.
Input:
4-D Tensor Layer.
Output:
4-D Tensor Layer. (Same dimension as input).
Arguments:
incoming: `Tensor`. Incoming Tensor.
depth_radius: `int`. 0-D. Half-width of the 1-D normalization window.
Defaults to 5.
bias: `float`. An offset (usually positive to avoid dividing by 0).
Defaults to 1.0.
alpha: `float`. A scale factor, usually positive. Defaults to 0.0001.
        beta: `float`. An exponent. Defaults to `0.75`.
name: `str`. A name for this layer (optional).
"""
with tf.name_scope(name) as scope:
inference = tf.nn.lrn(incoming, depth_radius=depth_radius,
bias=bias, alpha=alpha,
beta=beta, name=name)
inference.scope = scope
# Track output tensor.
tf.add_to_collection(tf.GraphKeys.LAYER_TENSOR + '/' + name, inference)
return inference
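# Illustrative usage sketch (not part of the original file). It assumes a
# 4-D incoming tensor produced by an earlier layer; `conv_2d` below is a
# placeholder for whatever layer produced that tensor.
#
#     net = conv_2d(net, 32, 3, activation=None)
#     net = batch_normalization(net)            # normalize pre-activation
#     net = tf.nn.relu(net)
#     net = local_response_normalization(net)   # AlexNet-style LRN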
| agpl-3.0 | 2,546,837,239,059,809,300 | 38.269737 | 86 | 0.582342 | false |
shin-/compose | compose/cli/verbose_proxy.py | 22 | 1770 | from __future__ import absolute_import
from __future__ import unicode_literals
import functools
import logging
import pprint
from itertools import chain
import six
def format_call(args, kwargs):
args = (repr(a) for a in args)
kwargs = ("{0!s}={1!r}".format(*item) for item in six.iteritems(kwargs))
return "({0})".format(", ".join(chain(args, kwargs)))
def format_return(result, max_lines):
if isinstance(result, (list, tuple, set)):
return "({0} with {1} items)".format(type(result).__name__, len(result))
if result:
lines = pprint.pformat(result).split('\n')
extra = '\n...' if len(lines) > max_lines else ''
return '\n'.join(lines[:max_lines]) + extra
return result
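# Quick illustrative examples of the helpers above (inputs are made up):
#
#     format_call((1, 'x'), {'all': True})   # -> "(1, 'x', all=True)"
#     format_return(list(range(20)), 10)     # -> "(list with 20 items)"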
class VerboseProxy(object):
"""Proxy all function calls to another class and log method name, arguments
and return values for each call.
"""
def __init__(self, obj_name, obj, log_name=None, max_lines=10):
self.obj_name = obj_name
self.obj = obj
self.max_lines = max_lines
self.log = logging.getLogger(log_name or __name__)
def __getattr__(self, name):
attr = getattr(self.obj, name)
if not six.callable(attr):
return attr
return functools.partial(self.proxy_callable, name)
def proxy_callable(self, call_name, *args, **kwargs):
self.log.info("%s %s <- %s",
self.obj_name,
call_name,
format_call(args, kwargs))
result = getattr(self.obj, call_name)(*args, **kwargs)
self.log.info("%s %s -> %s",
self.obj_name,
call_name,
format_return(result, self.max_lines))
return result
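# Minimal usage sketch (illustrative; `real_client` and its `containers`
# method are placeholders, not part of this module):
#
#     client = VerboseProxy('docker', real_client, max_lines=5)
#     client.containers(all=True)
#     # logs: "docker containers <- (all=True)"
#     #       "docker containers -> (list with N items)"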
| apache-2.0 | -8,928,886,728,567,784,000 | 28.5 | 80 | 0.575706 | false |
simonwydooghe/ansible | test/units/modules/storage/netapp/test_na_ontap_snapshot.py | 38 | 8659 | # (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit tests ONTAP Ansible module: na_ontap_nvme_snapshot'''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_snapshot \
import NetAppOntapSnapshot as my_module
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None):
''' save arguments '''
self.type = kind
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.type == 'snapshot':
xml = self.build_snapshot_info()
elif self.type == 'snapshot_fail':
raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
self.xml_out = xml
return xml
@staticmethod
def build_snapshot_info():
''' build xml data for snapshot-info '''
xml = netapp_utils.zapi.NaElement('xml')
data = {'num-records': 1,
'attributes-list': {'snapshot-info': {'comment': 'new comment',
'name': 'ansible',
'snapmirror-label': 'label12'}}}
xml.translate_struct(data)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.server = MockONTAPConnection()
self.onbox = False
def set_default_args(self):
if self.onbox:
hostname = '10.193.75.3'
username = 'admin'
password = 'netapp1!'
vserver = 'ansible'
volume = 'ansible'
snapshot = 'ansible'
comment = 'new comment'
snapmirror_label = 'label12'
else:
hostname = 'hostname'
username = 'username'
password = 'password'
vserver = 'vserver'
volume = 'ansible'
snapshot = 'ansible'
comment = 'new comment'
snapmirror_label = 'label12'
return dict({
'hostname': hostname,
'username': username,
'password': password,
'vserver': vserver,
'volume': volume,
'snapshot': snapshot,
'comment': comment,
'snapmirror_label': snapmirror_label
})
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
my_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_ensure_get_called(self):
''' test get_snapshot() for non-existent snapshot'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = self.server
assert my_obj.get_snapshot() is None
def test_ensure_get_called_existing(self):
''' test get_snapshot() for existing snapshot'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = MockONTAPConnection(kind='snapshot')
assert my_obj.get_snapshot()
@patch('ansible.modules.storage.netapp.na_ontap_snapshot.NetAppOntapSnapshot.create_snapshot')
def test_successful_create(self, create_snapshot):
''' creating snapshot and testing idempotency '''
set_module_args(self.set_default_args())
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
create_snapshot.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('snapshot')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_snapshot.NetAppOntapSnapshot.modify_snapshot')
def test_successful_modify(self, modify_snapshot):
''' modifying snapshot and testing idempotency '''
data = self.set_default_args()
data['comment'] = 'adding comment'
data['snapmirror_label'] = 'label22'
set_module_args(data)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('snapshot')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
modify_snapshot.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
data['comment'] = 'new comment'
data['snapmirror_label'] = 'label12'
set_module_args(data)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('snapshot')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_snapshot.NetAppOntapSnapshot.delete_snapshot')
def test_successful_delete(self, delete_snapshot):
''' deleting snapshot and testing idempotency '''
data = self.set_default_args()
data['state'] = 'absent'
set_module_args(data)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('snapshot')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
delete_snapshot.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
def test_if_all_methods_catch_exception(self):
module_args = {}
module_args.update(self.set_default_args())
set_module_args(module_args)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('snapshot_fail')
with pytest.raises(AnsibleFailJson) as exc:
my_obj.create_snapshot()
assert 'Error creating snapshot ansible:' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.delete_snapshot()
assert 'Error deleting snapshot ansible:' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.modify_snapshot()
assert 'Error modifying snapshot ansible:' in exc.value.args[0]['msg']
| gpl-3.0 | 5,452,039,387,822,053,000 | 37.314159 | 107 | 0.608153 | false |
ajtowns/bitcoin | test/util/bitcoin-util-test.py | 59 | 6594 | #!/usr/bin/env python3
# Copyright 2014 BitPay Inc.
# Copyright 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test framework for bitcoin utils.
Runs automatically during `make check`.
Can also be run manually."""
import argparse
import binascii
import configparser
import difflib
import json
import logging
import os
import pprint
import subprocess
import sys
def main():
config = configparser.ConfigParser()
config.optionxform = str
config.read_file(open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8"))
env_conf = dict(config.items('environment'))
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true')
args = parser.parse_args()
verbose = args.verbose
if verbose:
level = logging.DEBUG
else:
level = logging.ERROR
formatter = '%(asctime)s - %(levelname)s - %(message)s'
# Add the format/level to the logger
logging.basicConfig(format=formatter, level=level)
bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "bitcoin-util-test.json", env_conf)
def bctester(testDir, input_basename, buildenv):
""" Loads and parses the input file, runs all tests and reports results"""
input_filename = os.path.join(testDir, input_basename)
raw_data = open(input_filename, encoding="utf8").read()
input_data = json.loads(raw_data)
failed_testcases = []
for testObj in input_data:
try:
bctest(testDir, testObj, buildenv)
logging.info("PASSED: " + testObj["description"])
except:
logging.info("FAILED: " + testObj["description"])
failed_testcases.append(testObj["description"])
if failed_testcases:
error_message = "FAILED_TESTCASES:\n"
error_message += pprint.pformat(failed_testcases, width=400)
logging.error(error_message)
sys.exit(1)
else:
sys.exit(0)
def bctest(testDir, testObj, buildenv):
"""Runs a single test, comparing output and RC to expected output and RC.
Raises an error if input can't be read, executable fails, or output/RC
are not as expected. Error is caught by bctester() and reported.
"""
# Get the exec names and arguments
execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"])
execargs = testObj['args']
execrun = [execprog] + execargs
# Read the input data (if there is any)
stdinCfg = None
inputData = None
if "input" in testObj:
filename = os.path.join(testDir, testObj["input"])
inputData = open(filename, encoding="utf8").read()
stdinCfg = subprocess.PIPE
# Read the expected output data (if there is any)
outputFn = None
outputData = None
outputType = None
if "output_cmp" in testObj:
outputFn = testObj['output_cmp']
outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare)
try:
outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read()
except:
logging.error("Output file " + outputFn + " can not be opened")
raise
if not outputData:
logging.error("Output data missing for " + outputFn)
raise Exception
if not outputType:
logging.error("Output file %s does not have a file extension" % outputFn)
raise Exception
# Run the test
proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
try:
outs = proc.communicate(input=inputData)
except OSError:
logging.error("OSError, Failed to execute " + execprog)
raise
if outputData:
data_mismatch, formatting_mismatch = False, False
# Parse command output and expected output
try:
a_parsed = parse_output(outs[0], outputType)
except Exception as e:
logging.error('Error parsing command output as %s: %s' % (outputType, e))
raise
try:
b_parsed = parse_output(outputData, outputType)
except Exception as e:
logging.error('Error parsing expected output %s as %s: %s' % (outputFn, outputType, e))
raise
# Compare data
if a_parsed != b_parsed:
logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
data_mismatch = True
# Compare formatting
if outs[0] != outputData:
error_message = "Output formatting mismatch for " + outputFn + ":\n"
error_message += "".join(difflib.context_diff(outputData.splitlines(True),
outs[0].splitlines(True),
fromfile=outputFn,
tofile="returned"))
logging.error(error_message)
formatting_mismatch = True
assert not data_mismatch and not formatting_mismatch
# Compare the return code to the expected return code
wantRC = 0
if "return_code" in testObj:
wantRC = testObj['return_code']
if proc.returncode != wantRC:
logging.error("Return code mismatch for " + outputFn)
raise Exception
if "error_txt" in testObj:
want_error = testObj["error_txt"]
# Compare error text
# TODO: ideally, we'd compare the strings exactly and also assert
        # that stderr is empty if no errors are expected. However, bitcoin-tx
# emits DISPLAY errors when running as a windows application on
# linux through wine. Just assert that the expected error text appears
# somewhere in stderr.
if want_error not in outs[1]:
logging.error("Error mismatch:\n" + "Expected: " + want_error + "\nReceived: " + outs[1].rstrip())
raise Exception
def parse_output(a, fmt):
"""Parse the output according to specified format.
Raise an error if the output can't be parsed."""
if fmt == 'json': # json: compare parsed data
return json.loads(a)
elif fmt == 'hex': # hex: parse and compare binary data
return binascii.a2b_hex(a.strip())
else:
raise NotImplementedError("Don't know how to compare %s" % fmt)
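# Quick illustrative checks of parse_output (inputs are made up):
#
#     parse_output('{"txid": "ab"}', 'json')   # -> {'txid': 'ab'}
#     parse_output('0123abcd\n', 'hex')        # -> b'\x01\x23\xab\xcd'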
if __name__ == '__main__':
main()
| mit | -8,068,480,639,048,420,000 | 36.68 | 125 | 0.623294 | false |
TemoaProject/temoa | temoa_model/get_comm_tech.py | 1 | 9277 | import sqlite3
import os
import sys
import getopt
import re
from collections import OrderedDict
def get_tperiods(inp_f):
file_ty = re.search(r"(\w+)\.(\w+)\b", inp_f) # Extract the input filename and extension
if not file_ty :
raise "The file type %s is not recognized." % inp_f
elif file_ty.group(2) not in ("db", "sqlite", "sqlite3", "sqlitedb"):
raise Exception("Please specify a database for finding scenarios")
periods_list = {}
periods_set = set()
con = sqlite3.connect(inp_f)
cur = con.cursor() # a database cursor is a control structure that enables traversal over the records in a database
con.text_factory = str # this ensures data is explored with the correct UTF-8 encoding
print(inp_f)
cur.execute("SELECT DISTINCT scenario FROM Output_VFlow_Out")
x = []
for row in cur:
x.append(row[0])
for y in x:
cur.execute("SELECT DISTINCT t_periods FROM Output_VFlow_Out WHERE scenario is '"+str(y)+"'")
periods_list[y] = []
for per in cur:
z = per[0]
periods_list[y].append(z)
cur.close()
con.close()
    return OrderedDict(sorted(periods_list.items(), key=lambda x: x[1]))
def get_scenario(inp_f):
file_ty = re.search(r"(\w+)\.(\w+)\b", inp_f) # Extract the input filename and extension
if not file_ty :
raise "The file type %s is not recognized." % inp_f
elif file_ty.group(2) not in ("db", "sqlite", "sqlite3", "sqlitedb") :
raise Exception("Please specify a database for finding scenarios")
scene_list = {}
scene_set = set()
con = sqlite3.connect(inp_f)
cur = con.cursor() # a database cursor is a control structure that enables traversal over the records in a database
con.text_factory = str #this ensures data is explored with the correct UTF-8 encoding
print(inp_f)
cur.execute("SELECT DISTINCT scenario FROM Output_VFlow_Out")
for row in cur:
x = row[0]
scene_list[x] = x
cur.close()
con.close()
    return OrderedDict(sorted(scene_list.items(), key=lambda x: x[1]))
def get_comm(inp_f, db_dat):
comm_list = {}
comm_set = set()
    found_rows = False  # set True once the commodities query yields rows
if not db_dat :
con = sqlite3.connect(inp_f)
cur = con.cursor() # a database cursor is a control structure that enables traversal over the records in a database
con.text_factory = str #this ensures data is explored with the correct UTF-8 encoding
print(inp_f)
cur.execute("SELECT DISTINCT comm_name FROM commodities")
for row in cur:
            found_rows = True
if row[0] != 'ethos':
x= row[0]
comm_list[x] = x
        if not found_rows:  # fall back to the output flow table
cur.execute("SELECT input_comm FROM Output_VFlow_Out UNION SELECT output_comm FROM Output_VFlow_Out")
for row in cur:
if row[0] != 'ethos':
x= row[0]
comm_list[x] = x
cur.close()
con.close()
else:
eff_flag = False
with open (inp_f) as f :
for line in f:
if eff_flag is False and re.search("^\s*param\s+efficiency\s*[:][=]", line, flags = re.I) :
#Search for the line param Efficiency := (The script recognizes the commodities specified in this section)
eff_flag = True
elif eff_flag :
line = re.sub("[#].*$", " ", line)
if re.search("^\s*;\s*$", line) :
break # Finish searching this section when encounter a ';'
if re.search("^\s+$", line) :
continue
line = re.sub("^\s+|\s+$", "", line)
row = re.split("\s+", line)
if row[0] != 'ethos':
comm_set.add(row[0])
comm_set.add(row[3])
if eff_flag is False :
print("Error: The Efficiency Parameters cannot be found in the specified file - "+inp_f)
sys.exit(2)
for x in comm_set:
comm_list[x] = x
return OrderedDict ( sorted(comm_list.items(), key=lambda x: x[1]) )
def get_tech(inp_f, db_dat):
tech_list = {}
tech_set = set()
    found_rows = False  # set True once the technologies query yields rows
if not db_dat :
con = sqlite3.connect(inp_f)
cur = con.cursor() # a database cursor is a control structure that enables traversal over the records in a database
con.text_factory = str #this ensures data is explored with the correct UTF-8 encoding
print(inp_f)
cur.execute("SELECT DISTINCT tech FROM technologies")
for row in cur:
            found_rows = True
x= row[0]
tech_list[x] = x
        if not found_rows:  # fall back to the output flow table
cur.execute("SELECT DISTINCT tech FROM Output_VFlow_Out")
for row in cur:
x= row[0]
tech_list[x] = x
cur.close()
con.close()
else:
eff_flag = False
with open (inp_f) as f :
for line in f:
if eff_flag is False and re.search("^\s*param\s+efficiency\s*[:][=]", line, flags = re.I) :
#Search for the line param Efficiency := (The script recognizes the commodities specified in this section)
eff_flag = True
elif eff_flag :
line = re.sub("[#].*$", " ", line)
if re.search("^\s*;\s*$", line) :
break # Finish searching this section when encounter a ';'
if re.search("^\s+$", line) :
continue
line = re.sub("^\s+|\s+$", "", line)
row = re.split("\s+", line)
tech_set.add(row[1])
if eff_flag is False :
print("Error: The Efficiency Parameters cannot be found in the specified file - "+inp_f)
sys.exit(2)
for x in tech_set:
tech_list[x] = x
return OrderedDict ( sorted(tech_list.items(), key=lambda x: x[1]) )
def is_db_overwritten(db_file, inp_dat_file):
if os.path.basename(db_file) == '0':
return False
try:
con = sqlite3.connect(db_file)
except:
return False
cur = con.cursor() # A database cursor enables traversal over DB records
con.text_factory = str # This ensures data is explored with UTF-8 encoding
# Copy tables from Input File to DB file.
# IF output file is empty database.
cur.execute("SELECT * FROM technologies")
    is_db_populated = False  # becomes True if the technologies table has rows
for elem in cur:
        is_db_populated = True
break
# This file could be schema with populated results from previous run. Or it could be a normal db file.
    if is_db_populated:
cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='input_file';")
does_input_file_table_exist = False
for i in cur: # This means that the 'input_file' table exists in db.
does_input_file_table_exist = True
if does_input_file_table_exist: # This block distinguishes normal database from schema.
# This is schema file.
cur.execute("SELECT file FROM input_file WHERE id is '1';")
for i in cur:
tagged_file = i[0]
tagged_file = re.sub('["]', "", tagged_file)
cur.close()
con.close()
if tagged_file == inp_dat_file.split(".")[0] + ".dat":
# If Input_file name matches, no overwriting.
return False
else:
# If not a match, delete output tables and update input_file. Return True
return True
cur.close()
con.close()
return False
def help_user():
print('''Use as:
python get_comm_tech.py -i (or --input) <input filename>
| -c (or --comm) To get a dict of commodities
| -t (or --tech) To get a dict of commodities
| -s (or --scenario) To get a dict of scenarios
| -p (or --period) To get a dict of time periods
| -h (or --help) ''')
def get_info(inputs):
inp_file = None
tech_flag = False
comm_flag = False
scene = False
db_or_dat = False # Means db by default
tperiods_flag = False
if inputs is None:
raise Exception("no arguments found")
for opt, arg in inputs.items():
print("%s == %s" %(opt, arg))
if opt in ("-i", "--input"):
inp_file = arg
elif opt in ("-c", "--comm"):
comm_flag = True
elif opt in ("-t", "--tech"):
tech_flag = True
elif opt in ("-s", "--scenario"):
scene = True
elif opt in ("-p", "--period"):
tperiods_flag = True
elif opt in ("-h", "--help"):
help_user()
sys.exit(2)
if inp_file is None:
raise Exception("Input file not specified")
if tperiods_flag:
if comm_flag or scene or tech_flag:
raise Exception("can only use one flag at a time")
    if (comm_flag and tech_flag) or (comm_flag and scene) or (scene and tech_flag):
raise Exception("can only use one flag at a time")
if not comm_flag and not tech_flag and not scene and not tperiods_flag:
raise Exception("flag not specified")
file_ty = re.search(r"(\w+)\.(\w+)\b", inp_file) # Extract the input filename and extension
if not file_ty:
raise Exception("The file type {} is not recognized.".format(file_ty))
elif file_ty.group(2) in ("db", "sqlite", "sqlite3", "sqlitedb"):
db_or_dat = False
elif file_ty.group(2) in ("dat", "txt"):
db_or_dat = True
else :
print("The input file type %s is not recognized. Please specify a database or a text file." % inp_f)
sys.exit(2)
if comm_flag:
return get_comm(inp_file, db_or_dat)
if tech_flag:
return get_tech(inp_file, db_or_dat)
if tperiods_flag:
return get_tperiods(inp_file)
if scene:
if db_or_dat:
raise Exception("Please specify a database for finding scenarios")
return get_scenario(inp_file)
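# Illustrative programmatic call (the database name is a placeholder):
#
#     techs = get_info({'--input': 'temoa_utopia.sqlite', '--tech': ''})
#     # -> OrderedDict mapping each technology name to itself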
if __name__ == "__main__":
try:
argv = sys.argv[1:]
opts, args = getopt.getopt(argv, "hctsi:p", ["help", "comm", "tech", "scenario","input=", "period"])
print(opts)
except getopt.GetoptError:
help_user()
sys.exit(2)
print(get_info( dict(opts) ))
| gpl-2.0 | 7,540,277,114,393,624,000 | 27.457055 | 119 | 0.638137 | false |
wonjunetai/pulse | features/main.py | 1 | 11431 | import json
import time
from features.features_helpers import create_paths_for_cell_line
from features.uniprot_transmem import get_transmembrane_region_features
from features.uniprot_ptm import get_postranscriptional_modification_features
from features.uniprot_elm_read import get_uniprot_elm_features
from features.generate_iupred_file import generate_iupred_file
from features.uniprot_disorder import get_uniprot_disorder_features
from features.uniprot_domain_read import get_uniprot_domain_read
from features.run_pfam_scan import start_pfam_scan
from features.uniprot_core import get_sable_scores
from features.mutation_features import get_mutation_features
from features.conservation_conversion_query import create_query_file
from features.convert_from_hg19_hg18 import use_remap_api
from features.seq_conserv import generate_sequence_conservation_features
from features.event_conserv import generate_event_conservation_feature_table
from features.generate_ml_input import generate_machine_learning_matrix
from features.network_features import generate_network_features
from helpers.normalize_unicode_data import normalize_unicode_data
def feature_extract_cell_line(cell_line, pulse_path, preprocess_input_path, feature_extract_output_path):
# TODO: Could probably be better if you put every element in FEATURES_SETTINGS into a dict?
# TODO: Should move all of param_files_locations to be relative to pulse!
features_settings = json.load(open(pulse_path + '/features/features_settings.json'))
create_paths_for_cell_line(pulse_path, cell_line)
print "Features paths created for: " + cell_line
#########################
# TRANSMEMBRANE SCORING #
#########################
print "Now getting transmembrane region features..."
uniprot_exon_indices_location = preprocess_input_path + '/uniprot_exon_indices_map.out'
uniprot_tm_indices_db_location = normalize_unicode_data(features_settings["F_UNIPROT_TRANSMEM_INDICES_LOCATION"])
uniprot_tm_read_output_location = feature_extract_output_path + '/transmem_read.out'
get_transmembrane_region_features(uniprot_exon_indices_location, uniprot_tm_indices_db_location,
uniprot_tm_read_output_location)
print "Finished getting transmembrane region features."
time.sleep(2)
################
# PTM FEATURES #
################
print "Now getting post-transcriptional modifications..."
uniprot_ptm_db_location = normalize_unicode_data(features_settings["F_PTMS_LOCATION"])
uniprot_ptm_read_output_location = feature_extract_output_path + '/ptm_read.out'
get_postranscriptional_modification_features(uniprot_exon_indices_location, uniprot_ptm_db_location,
uniprot_ptm_read_output_location)
print "Finished getting post-transcriptional modification features."
###################################
# EUKARYOTIC LINEAR MOTIF SCORING #
###################################
print "Now getting eukaryotic linear motif scores..."
uniprot_elm_db_location = normalize_unicode_data(features_settings["F_ELM2_LOCATION"])
uniprot_elm_read_output_location = feature_extract_output_path + '/elm_read.out'
get_uniprot_elm_features(uniprot_exon_indices_location, uniprot_elm_db_location, uniprot_elm_read_output_location)
print "Finished getting eukaryotic linear motif scores."
#############################
# DISORDEROME HELPER SCRIPT #
#############################
print "Now running helper files for disorderome..."
p_seq_output_location = preprocess_input_path + '/p_seq_isoforms.fas'
iupred_isoforms_output_location = feature_extract_output_path + '/iupred_isoforms.out'
iupred_install_path = normalize_unicode_data(features_settings["IUPRED_INSTALL_PATH"])
generate_iupred_file(p_seq_output_location, feature_extract_output_path,
iupred_install_path, iupred_isoforms_output_location)
print "Now done running helper file for disorderome."
########################
# DISORDEROME FEATURES #
########################
print "Now getting disorderome features..."
canonical_db_location = pulse_path + '/input/info_canonical_v3.ddbb'
disorder_read_out_location = feature_extract_output_path + '/disorder_read.out'
get_uniprot_disorder_features(pulse_path, uniprot_exon_indices_location, iupred_isoforms_output_location,
canonical_db_location, disorder_read_out_location)
print "Finished getting disorderome features."
##########################
# PFAM & DOMAIN FEATURES #
##########################
print "Now running pfam_scan..."
pfam_scan_script_location = pulse_path + '/helpers/pfam_scan.pl'
pfam_input_location = preprocess_input_path + '/p_seq_isoforms.fas'
pfam_output_location = feature_extract_output_path + '/pfam_done.out'
hmmer3_data_location = normalize_unicode_data(features_settings["HMMER3_DATA_LOCATION"])
pfam_exit_code = start_pfam_scan(pfam_scan_script_location, pfam_input_location,
pfam_output_location, hmmer3_data_location)
if pfam_exit_code == 0:
print "pfam_scan successful"
print "Now getting uniprot domain features..."
f_pfam_special_db_location = normalize_unicode_data(features_settings["F_PFAM_SPECIAL_LOCATION"])
domain_read_output_location = feature_extract_output_path + '/domain_read.out'
get_uniprot_domain_read(f_pfam_special_db_location, canonical_db_location, uniprot_exon_indices_location,
pfam_output_location, domain_read_output_location)
print "Finished getting uniprot domain features."
#################
# SABLE SCORING #
#################
print "Now getting features for SABLE..."
f_sable_db_location = normalize_unicode_data(features_settings["F_SABLE_LOCATION"])
uniprot_core_output_location = feature_extract_output_path + '/core_read.out'
get_sable_scores(uniprot_exon_indices_location, f_sable_db_location, uniprot_core_output_location)
print "Finished getting features for SABLE."
####################
# MUTATION SCORING #
####################
print "Now getting mutation scores..."
f_mutations_db_location = normalize_unicode_data(features_settings["F_MUTATIONS_LOCATION"])
mutation_features_output_location = feature_extract_output_path + '/mutation_read.out'
get_mutation_features(uniprot_exon_indices_location, f_mutations_db_location,
mutation_features_output_location)
print "Finished getting mutation scores."
#################################
# CONSERVATION/NETWORK FEATURES #
#################################
print "Creating query file conservation/network features..."
conservation_query_output_location = feature_extract_output_path + '/conservation_query.txt'
as_location_file = preprocess_input_path + '/as_location.out'
create_query_file(as_location_file, conservation_query_output_location)
print "Finished creating query file."
print "Converting between hg19 to hg18 using the query file generated..."
remap_api_location = pulse_path + '/helpers/remap_api.pl'
remap_mode = "asm-asm"
remap_from = "GCF_000001405.13"
remap_to = "GCF_000001405.12"
remap_input_location = conservation_query_output_location
remap_output_location = feature_extract_output_path + '/report_conservationQuery.txt.xls'
remap_exit_code = use_remap_api(remap_api_location, remap_mode, remap_from, remap_to,
remap_input_location, remap_output_location)
print "Conversion complete."
if remap_exit_code == 0:
print "Now generating sequence conservation features..."
f_phastcons_db_location = normalize_unicode_data(features_settings["F_PHASTCONS_HG18_BED_LOCATION"])
as_location_file = preprocess_input_path + '/as_location.out'
remapped_coordinates_file = feature_extract_output_path + '/report_conservationQuery.txt'
sequence_conservation_output_location = feature_extract_output_path + '/sequenceCon_read.out'
generate_sequence_conservation_features(f_phastcons_db_location, as_location_file,
remapped_coordinates_file, sequence_conservation_output_location)
print "Finished generating sequence conservation features."
print "Now generating event_conservation feature table..."
f_event_conservation_db_location = normalize_unicode_data(features_settings["F_EVENT_CONSERVATION_LOCATION"])
event_conservation_output = feature_extract_output_path + '/eventCon_read.out'
generate_event_conservation_feature_table(f_phastcons_db_location, f_event_conservation_db_location,
as_location_file, remapped_coordinates_file,
event_conservation_output)
print "Finished generating event_conservation feature table."
print "Now generating network features using gene names..."
f_uniprot_genewiki_location = normalize_unicode_data(features_settings["F_UNIPROT_GENEWIKI_LOCATION"])
f_degree_location = normalize_unicode_data(features_settings["F_DEGREE_LOCATION"])
network_features_output_location = feature_extract_output_path + '/degree_read.out'
generate_network_features(f_uniprot_genewiki_location, f_degree_location, uniprot_exon_indices_location,
network_features_output_location)
print "Finished generating network features."
###############################
# GENERATE MATRIX OF FEATURES #
###############################
print "Now generating ML input..."
machine_learning_output_path = pulse_path + '/output/machine/' + cell_line
generated_ml_output = machine_learning_output_path + '/features_headers.txt'
generated_ml_names = machine_learning_output_path + '/names.txt'
ts_read = normalize_unicode_data(features_settings["ML_AS_EVENT_CLASSIFICATION"])
generate_machine_learning_matrix(uniprot_exon_indices_location, uniprot_core_output_location,
network_features_output_location,
disorder_read_out_location, domain_read_output_location,
uniprot_elm_read_output_location, event_conservation_output,
mutation_features_output_location, uniprot_ptm_read_output_location,
sequence_conservation_output_location, uniprot_tm_read_output_location,
ts_read, generated_ml_output, generated_ml_names)
print "Finished generating ML input."
else:
print "Remapping failed"
exit()
else:
print "pfam_scan failed"
exit()
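# Illustrative call (all paths below are placeholders; the external resource
# locations come from features_settings.json):
#
#     feature_extract_cell_line('MCF7', '/opt/pulse',
#                               '/opt/pulse/output/preprocess/MCF7',
#                               '/opt/pulse/output/features/MCF7')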
| mit | 5,733,470,211,454,113,000 | 56.442211 | 121 | 0.644038 | false |
glaubitz/fs-uae-debian | arcade/OpenGL/GL/VERSION/GL_1_2_images.py | 9 | 1587 | """Version 1.2 Image-handling functions
Almost all of the 1.2 enhancements are image-handling-related,
so this is, most of the 1.2 wrapper code...
Note that the functions that manually wrap certain operations are
guarded by if simple.functionName checks, so that you can use
if functionName to see if the function is available at run-time.
"""
from OpenGL import wrapper, constants, arrays
from OpenGL.raw.GL.ARB import imaging
from OpenGL.raw.GL.VERSION import GL_1_2 as _simple
from OpenGL.GL.ARB.imaging import *
from OpenGL.GL import images
import ctypes
for suffix,arrayConstant in [
('b', constants.GL_BYTE),
('f', constants.GL_FLOAT),
('i', constants.GL_INT),
('s', constants.GL_SHORT),
('ub', constants.GL_UNSIGNED_BYTE),
('ui', constants.GL_UNSIGNED_INT),
('us', constants.GL_UNSIGNED_SHORT),
]:
for functionName in (
'glTexImage3D',
'glTexSubImage3D', # extension/1.2 standard
):
functionName, function = images.typedImageFunction(
suffix, arrayConstant, getattr(_simple, functionName),
)
globals()[functionName] = function
try:
del function, functionName
except NameError as err:
pass
try:
del suffix,arrayConstant
except NameError as err:
pass
glTexImage3D = images.setDimensionsAsInts(
images.setImageInput(
_simple.glTexImage3D,
typeName = 'type',
)
)
glTexSubImage3D = images.setDimensionsAsInts(
images.setImageInput(
_simple.glTexSubImage3D,
typeName = 'type',
)
)
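# Illustrative usage sketch (assumes a current GL context and `texels`, a
# (depth, height, width, 4) uint8 RGBA array; the names are placeholders):
#
#     glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA8, w, h, d, 0,
#                  GL_RGBA, GL_UNSIGNED_BYTE, texels)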
| gpl-2.0 | 938,005,508,820,903,800 | 27.854545 | 66 | 0.669187 | false |
jaimahajan1997/sympy | sympy/printing/jscode.py | 17 | 10902 | """
Javascript code printer
The JavascriptCodePrinter converts single sympy expressions into single
Javascript expressions, using the functions defined in the Javascript
Math object where possible.
"""
from __future__ import print_function, division
from sympy.core import S
from sympy.codegen.ast import Assignment
from sympy.printing.codeprinter import CodePrinter
from sympy.printing.precedence import precedence
from sympy.core.compatibility import string_types, range
# dictionary mapping sympy function to (argument_conditions, Javascript_function).
# Used in JavascriptCodePrinter._print_Function(self)
known_functions = {
'Abs': 'Math.abs',
'sin': 'Math.sin',
'cos': 'Math.cos',
'tan': 'Math.tan',
'acos': 'Math.acos',
'asin': 'Math.asin',
'atan': 'Math.atan',
'atan2': 'Math.atan2',
'ceiling': 'Math.ceil',
'floor': 'Math.floor',
'sign': 'Math.sign',
'exp': 'Math.exp',
'log': 'Math.log',
}
class JavascriptCodePrinter(CodePrinter):
""""A Printer to convert python expressions to strings of javascript code
"""
printmethod = '_javascript'
language = 'Javascript'
_default_settings = {
'order': None,
'full_prec': 'auto',
'precision': 15,
'user_functions': {},
'human': True,
'contract': True
}
def __init__(self, settings={}):
CodePrinter.__init__(self, settings)
self.known_functions = dict(known_functions)
userfuncs = settings.get('user_functions', {})
self.known_functions.update(userfuncs)
def _rate_index_position(self, p):
return p*5
def _get_statement(self, codestring):
return "%s;" % codestring
def _get_comment(self, text):
return "// {0}".format(text)
def _declare_number_const(self, name, value):
return "var {0} = {1};".format(name, value)
def _format_code(self, lines):
return self.indent_code(lines)
def _traverse_matrix_indices(self, mat):
rows, cols = mat.shape
return ((i, j) for i in range(rows) for j in range(cols))
def _get_loop_opening_ending(self, indices):
open_lines = []
close_lines = []
loopstart = "for (var %(varble)s=%(start)s; %(varble)s<%(end)s; %(varble)s++){"
for i in indices:
# Javascript arrays start at 0 and end at dimension-1
open_lines.append(loopstart % {
'varble': self._print(i.label),
'start': self._print(i.lower),
'end': self._print(i.upper + 1)})
close_lines.append("}")
return open_lines, close_lines
def _print_Pow(self, expr):
PREC = precedence(expr)
if expr.exp == -1:
return '1/%s' % (self.parenthesize(expr.base, PREC))
elif expr.exp == 0.5:
return 'Math.sqrt(%s)' % self._print(expr.base)
else:
return 'Math.pow(%s, %s)' % (self._print(expr.base),
self._print(expr.exp))
def _print_Rational(self, expr):
p, q = int(expr.p), int(expr.q)
return '%d/%d' % (p, q)
def _print_Indexed(self, expr):
# calculate index for 1d array
dims = expr.shape
elem = S.Zero
offset = S.One
for i in reversed(range(expr.rank)):
elem += expr.indices[i]*offset
offset *= dims[i]
return "%s[%s]" % (self._print(expr.base.label), self._print(elem))
def _print_Idx(self, expr):
return self._print(expr.label)
def _print_Exp1(self, expr):
return "Math.E"
def _print_Pi(self, expr):
return 'Math.PI'
def _print_Infinity(self, expr):
return 'Number.POSITIVE_INFINITY'
def _print_NegativeInfinity(self, expr):
return 'Number.NEGATIVE_INFINITY'
def _print_Piecewise(self, expr):
if expr.args[-1].cond != True:
# We need the last conditional to be a True, otherwise the resulting
# function may not return a result.
raise ValueError("All Piecewise expressions must contain an "
"(expr, True) statement to be used as a default "
"condition. Without one, the generated "
"expression may not evaluate to anything under "
"some condition.")
lines = []
if expr.has(Assignment):
for i, (e, c) in enumerate(expr.args):
if i == 0:
lines.append("if (%s) {" % self._print(c))
elif i == len(expr.args) - 1 and c == True:
lines.append("else {")
else:
lines.append("else if (%s) {" % self._print(c))
code0 = self._print(e)
lines.append(code0)
lines.append("}")
return "\n".join(lines)
else:
# The piecewise was used in an expression, need to do inline
# operators. This has the downside that inline operators will
# not work for statements that span multiple lines (Matrix or
# Indexed expressions).
ecpairs = ["((%s) ? (\n%s\n)\n" % (self._print(c), self._print(e))
for e, c in expr.args[:-1]]
last_line = ": (\n%s\n)" % self._print(expr.args[-1].expr)
return ": ".join(ecpairs) + last_line + " ".join([")"*len(ecpairs)])
def _print_MatrixElement(self, expr):
return "{0}[{1}]".format(expr.parent, expr.j +
expr.i*expr.parent.shape[1])
def indent_code(self, code):
"""Accepts a string of code or a list of code lines"""
if isinstance(code, string_types):
code_lines = self.indent_code(code.splitlines(True))
return ''.join(code_lines)
tab = " "
inc_token = ('{', '(', '{\n', '(\n')
dec_token = ('}', ')')
code = [ line.lstrip(' \t') for line in code ]
increase = [ int(any(map(line.endswith, inc_token))) for line in code ]
decrease = [ int(any(map(line.startswith, dec_token)))
for line in code ]
pretty = []
level = 0
for n, line in enumerate(code):
if line == '' or line == '\n':
pretty.append(line)
continue
level -= decrease[n]
pretty.append("%s%s" % (tab*level, line))
level += increase[n]
return pretty
def jscode(expr, assign_to=None, **settings):
"""Converts an expr to a string of javascript code
Parameters
==========
expr : Expr
A sympy expression to be converted.
assign_to : optional
When given, the argument is used as the name of the variable to which
the expression is assigned. Can be a string, ``Symbol``,
``MatrixSymbol``, or ``Indexed`` type. This is helpful in case of
line-wrapping, or for expressions that generate multi-line statements.
precision : integer, optional
The precision for numbers such as pi [default=15].
user_functions : dict, optional
A dictionary where keys are ``FunctionClass`` instances and values are
their string representations. Alternatively, the dictionary value can
be a list of tuples i.e. [(argument_test, js_function_string)]. See
below for examples.
human : bool, optional
If True, the result is a single string that may contain some constant
declarations for the number symbols. If False, the same information is
returned in a tuple of (symbols_to_declare, not_supported_functions,
code_text). [default=True].
contract: bool, optional
If True, ``Indexed`` instances are assumed to obey tensor contraction
rules and the corresponding nested loops over indices are generated.
Setting contract=False will not generate loops, instead the user is
responsible to provide values for the indices in the code.
[default=True].
Examples
========
>>> from sympy import jscode, symbols, Rational, sin, ceiling, Abs
>>> x, tau = symbols("x, tau")
>>> jscode((2*tau)**Rational(7, 2))
'8*Math.sqrt(2)*Math.pow(tau, 7/2)'
>>> jscode(sin(x), assign_to="s")
's = Math.sin(x);'
Custom printing can be defined for certain types by passing a dictionary of
"type" : "function" to the ``user_functions`` kwarg. Alternatively, the
dictionary value can be a list of tuples i.e. [(argument_test,
js_function_string)].
>>> custom_functions = {
... "ceiling": "CEIL",
... "Abs": [(lambda x: not x.is_integer, "fabs"),
... (lambda x: x.is_integer, "ABS")]
... }
>>> jscode(Abs(x) + ceiling(x), user_functions=custom_functions)
'fabs(x) + CEIL(x)'
``Piecewise`` expressions are converted into conditionals. If an
``assign_to`` variable is provided an if statement is created, otherwise
the ternary operator is used. Note that if the ``Piecewise`` lacks a
default term, represented by ``(expr, True)`` then an error will be thrown.
This is to prevent generating an expression that may not evaluate to
anything.
>>> from sympy import Piecewise
>>> expr = Piecewise((x + 1, x > 0), (x, True))
>>> print(jscode(expr, tau))
if (x > 0) {
tau = x + 1;
}
else {
tau = x;
}
Support for loops is provided through ``Indexed`` types. With
``contract=True`` these expressions will be turned into loops, whereas
``contract=False`` will just print the assignment expression that should be
looped over:
>>> from sympy import Eq, IndexedBase, Idx
>>> len_y = 5
>>> y = IndexedBase('y', shape=(len_y,))
>>> t = IndexedBase('t', shape=(len_y,))
>>> Dy = IndexedBase('Dy', shape=(len_y-1,))
>>> i = Idx('i', len_y-1)
>>> e=Eq(Dy[i], (y[i+1]-y[i])/(t[i+1]-t[i]))
>>> jscode(e.rhs, assign_to=e.lhs, contract=False)
'Dy[i] = (y[i + 1] - y[i])/(t[i + 1] - t[i]);'
Matrices are also supported, but a ``MatrixSymbol`` of the same dimensions
must be provided to ``assign_to``. Note that any expression that can be
generated normally can also exist inside a Matrix:
>>> from sympy import Matrix, MatrixSymbol
>>> mat = Matrix([x**2, Piecewise((x + 1, x > 0), (x, True)), sin(x)])
>>> A = MatrixSymbol('A', 3, 1)
>>> print(jscode(mat, A))
A[0] = Math.pow(x, 2);
if (x > 0) {
A[1] = x + 1;
}
else {
A[1] = x;
}
A[2] = Math.sin(x);
"""
return JavascriptCodePrinter(settings).doprint(expr, assign_to)
def print_jscode(expr, **settings):
"""Prints the Javascript representation of the given expression.
See jscode for the meaning of the optional arguments.
"""
print(jscode(expr, **settings))
| bsd-3-clause | -8,977,775,939,714,953,000 | 34.396104 | 87 | 0.57494 | false |
divio/django | tests/template_tests/syntax_tests/test_extends.py | 86 | 15503 | from django.test import SimpleTestCase
from ..utils import setup
inheritance_templates = {
'inheritance01': "1{% block first %}&{% endblock %}3{% block second %}_{% endblock %}",
'inheritance02': "{% extends 'inheritance01' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance03': "{% extends 'inheritance02' %}",
'inheritance04': "{% extends 'inheritance01' %}",
'inheritance05': "{% extends 'inheritance02' %}",
'inheritance06': "{% extends foo %}",
'inheritance07': "{% extends 'inheritance01' %}{% block second %}5{% endblock %}",
'inheritance08': "{% extends 'inheritance02' %}{% block second %}5{% endblock %}",
'inheritance09': "{% extends 'inheritance04' %}",
'inheritance10': "{% extends 'inheritance04' %} ",
'inheritance11': "{% extends 'inheritance04' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance12': "{% extends 'inheritance07' %}{% block first %}2{% endblock %}",
'inheritance13': "{% extends 'inheritance02' %}"
"{% block first %}a{% endblock %}{% block second %}b{% endblock %}",
'inheritance14': "{% extends 'inheritance01' %}{% block newblock %}NO DISPLAY{% endblock %}",
'inheritance15': "{% extends 'inheritance01' %}"
"{% block first %}2{% block inner %}inner{% endblock %}{% endblock %}",
'inheritance16': "{% extends 'inheritance15' %}{% block inner %}out{% endblock %}",
'inheritance17': "{% load testtags %}{% block first %}1234{% endblock %}",
'inheritance18': "{% load testtags %}{% echo this that theother %}5678",
'inheritance19': "{% extends 'inheritance01' %}"
"{% block first %}{% load testtags %}{% echo 400 %}5678{% endblock %}",
'inheritance20': "{% extends 'inheritance01' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance21': "{% extends 'inheritance02' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance22': "{% extends 'inheritance04' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance23': "{% extends 'inheritance20' %}{% block first %}{{ block.super }}b{% endblock %}",
'inheritance24': "{% extends context_template %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance25': "{% extends context_template.1 %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance26': "no tags",
'inheritance27': "{% extends 'inheritance26' %}",
'inheritance 28': "{% block first %}!{% endblock %}",
'inheritance29': "{% extends 'inheritance 28' %}",
'inheritance30': "1{% if optional %}{% block opt %}2{% endblock %}{% endif %}3",
'inheritance31': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance32': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance33': "1{% ifequal optional 1 %}{% block opt %}2{% endblock %}{% endifequal %}3",
'inheritance34': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance35': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance36': "{% for n in numbers %}_{% block opt %}{{ n }}{% endblock %}{% endfor %}_",
'inheritance37': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance38': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance39': "{% extends 'inheritance30' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance40': "{% extends 'inheritance33' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance41': "{% extends 'inheritance36' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance42': "{% extends 'inheritance02'|cut:' ' %}",
}
class InheritanceTests(SimpleTestCase):
libraries = {'testtags': 'template_tests.templatetags.testtags'}
@setup(inheritance_templates)
def test_inheritance01(self):
"""
Standard template with no inheritance
"""
output = self.engine.render_to_string('inheritance01')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance02(self):
"""
Standard two-level inheritance
"""
output = self.engine.render_to_string('inheritance02')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance03(self):
"""
Three-level with no redefinitions on third level
"""
output = self.engine.render_to_string('inheritance03')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance04(self):
"""
Two-level with no redefinitions on second level
"""
output = self.engine.render_to_string('inheritance04')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance05(self):
"""
Two-level with double quotes instead of single quotes
"""
output = self.engine.render_to_string('inheritance05')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance06(self):
"""
Three-level with variable parent-template name
"""
output = self.engine.render_to_string('inheritance06', {'foo': 'inheritance02'})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance07(self):
"""
Two-level with one block defined, one block not defined
"""
output = self.engine.render_to_string('inheritance07')
self.assertEqual(output, '1&35')
@setup(inheritance_templates)
def test_inheritance08(self):
"""
Three-level with one block defined on this level, two blocks
defined next level
"""
output = self.engine.render_to_string('inheritance08')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance09(self):
"""
Three-level with second and third levels blank
"""
output = self.engine.render_to_string('inheritance09')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance10(self):
"""
Three-level with space NOT in a block -- should be ignored
"""
output = self.engine.render_to_string('inheritance10')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance11(self):
"""
Three-level with both blocks defined on this level, but none on
second level
"""
output = self.engine.render_to_string('inheritance11')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance12(self):
"""
Three-level with this level providing one and second level
providing the other
"""
output = self.engine.render_to_string('inheritance12')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance13(self):
"""
Three-level with this level overriding second level
"""
output = self.engine.render_to_string('inheritance13')
self.assertEqual(output, '1a3b')
@setup(inheritance_templates)
def test_inheritance14(self):
"""
A block defined only in a child template shouldn't be displayed
"""
output = self.engine.render_to_string('inheritance14')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance15(self):
"""
A block within another block
"""
output = self.engine.render_to_string('inheritance15')
self.assertEqual(output, '12inner3_')
@setup(inheritance_templates)
def test_inheritance16(self):
"""
A block within another block (level 2)
"""
output = self.engine.render_to_string('inheritance16')
self.assertEqual(output, '12out3_')
@setup(inheritance_templates)
def test_inheritance17(self):
"""
{% load %} tag (parent -- setup for exception04)
"""
output = self.engine.render_to_string('inheritance17')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance18(self):
"""
{% load %} tag (standard usage, without inheritance)
"""
output = self.engine.render_to_string('inheritance18')
self.assertEqual(output, 'this that theother5678')
@setup(inheritance_templates)
def test_inheritance19(self):
"""
{% load %} tag (within a child template)
"""
output = self.engine.render_to_string('inheritance19')
self.assertEqual(output, '140056783_')
@setup(inheritance_templates)
def test_inheritance20(self):
"""
Two-level inheritance with {{ block.super }}
"""
output = self.engine.render_to_string('inheritance20')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance21(self):
"""
Three-level inheritance with {{ block.super }} from parent
"""
output = self.engine.render_to_string('inheritance21')
self.assertEqual(output, '12a34')
@setup(inheritance_templates)
def test_inheritance22(self):
"""
Three-level inheritance with {{ block.super }} from grandparent
"""
output = self.engine.render_to_string('inheritance22')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance23(self):
"""
Three-level inheritance with {{ block.super }} from parent and
grandparent
"""
output = self.engine.render_to_string('inheritance23')
self.assertEqual(output, '1&ab3_')
@setup(inheritance_templates)
def test_inheritance24(self):
"""
Inheritance from local context without use of template loader
"""
context_template = self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")
output = self.engine.render_to_string('inheritance24', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance25(self):
"""
Inheritance from local context with variable parent template
"""
context_template = [
self.engine.from_string("Wrong"),
self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}"),
]
output = self.engine.render_to_string('inheritance25', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance26(self):
"""
Set up a base template to extend
"""
output = self.engine.render_to_string('inheritance26')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance27(self):
"""
Inheritance from a template that doesn't have any blocks
"""
output = self.engine.render_to_string('inheritance27')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance_28(self):
"""
Set up a base template with a space in it.
"""
output = self.engine.render_to_string('inheritance 28')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance29(self):
"""
Inheritance from a template with a space in its name should work.
"""
output = self.engine.render_to_string('inheritance29')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance30(self):
"""
Base template, putting block in a conditional {% if %} tag
"""
output = self.engine.render_to_string('inheritance30', {'optional': True})
self.assertEqual(output, '123')
# Inherit from a template with block wrapped in an {% if %} tag
# (in parent), still gets overridden
@setup(inheritance_templates)
def test_inheritance31(self):
output = self.engine.render_to_string('inheritance31', {'optional': True})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance32(self):
output = self.engine.render_to_string('inheritance32')
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance33(self):
"""
Base template, putting block in a conditional {% ifequal %} tag
"""
output = self.engine.render_to_string('inheritance33', {'optional': 1})
self.assertEqual(output, '123')
@setup(inheritance_templates)
def test_inheritance34(self):
"""
Inherit from a template with block wrapped in an {% ifequal %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance34', {'optional': 1})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance35(self):
"""
Inherit from a template with block wrapped in an {% ifequal %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance35', {'optional': 2})
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance36(self):
"""
Base template, putting block in a {% for %} tag
"""
output = self.engine.render_to_string('inheritance36', {'numbers': '123'})
self.assertEqual(output, '_1_2_3_')
@setup(inheritance_templates)
def test_inheritance37(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance37', {'numbers': '123'})
self.assertEqual(output, '_X_X_X_')
@setup(inheritance_templates)
def test_inheritance38(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance38')
self.assertEqual(output, '_')
# The super block will still be found.
@setup(inheritance_templates)
def test_inheritance39(self):
output = self.engine.render_to_string('inheritance39', {'optional': True})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance40(self):
output = self.engine.render_to_string('inheritance40', {'optional': 1})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance41(self):
output = self.engine.render_to_string('inheritance41', {'numbers': '123'})
self.assertEqual(output, '_new1_new2_new3_')
@setup(inheritance_templates)
def test_inheritance42(self):
"""
Expression starting and ending with a quote
"""
output = self.engine.render_to_string('inheritance42')
self.assertEqual(output, '1234')
| bsd-3-clause | 2,086,931,300,173,956,000 | 38.14899 | 121 | 0.607495 | false |
mattias-ohlsson/anaconda | tests/pyanaconda_test/vnc_test.py | 2 | 4102 | #!/usr/bin/python
import mock
import os
class VncTest(mock.TestCase):
def setUp(self):
self.setupModules(["_isys", "block", "logging", "ConfigParser"])
self.fs = mock.DiskIO()
import pyanaconda
pyanaconda.anaconda_log = mock.Mock()
self.OK = 22
import pyanaconda.vnc
pyanaconda.vnc.log = mock.Mock()
pyanaconda.vnc.os = mock.Mock()
pyanaconda.vnc.subprocess = mock.Mock()
pyanaconda.vnc.subprocess.Popen().communicate.return_value = (1, 2)
pyanaconda.vnc.subprocess.Popen().returncode = self.OK
pyanaconda.vnc.open = self.fs.open
self.ROOT = '/'
self.DISPLAY = '2'
self.DESKTOP = 'Desktop'
self.PASS = ''
self.LOG_FILE = '/tmp/vnc.log'
self.PW_FILE = '/tmp/vncpassword'
self.VNCCONNECTHOST = 'host'
def tearDown(self):
self.tearDownModules()
def set_vnc_password_1_test(self):
import pyanaconda.vnc
server = pyanaconda.vnc.VncServer()
pyanaconda.vnc.iutil = mock.Mock()
pyanaconda.vnc.os.pipe.return_value = (1, 2)
server.setVNCPassword()
self.assertEqual(
pyanaconda.vnc.iutil.execWithRedirect.call_args_list,
[(('vncpasswd', ['-f']), {'stdin': 1, 'stdout': '/tmp/vncpassword'})])
def initialize_test(self):
import pyanaconda.vnc
IP = '192.168.0.21'
HOSTNAME = 'desktop'
dev = mock.Mock()
dev.get.return_value = 'eth0'
pyanaconda.vnc.network = mock.Mock()
pyanaconda.vnc.network.Network().netdevices = [dev]
pyanaconda.vnc.network.getActiveNetDevs.return_value = [0]
pyanaconda.vnc.network.getDefaultHostname.return_value = HOSTNAME
pyanaconda.vnc.isys = mock.Mock()
pyanaconda.vnc.isys.getIPAddresses = mock.Mock(return_value=[IP])
server = pyanaconda.vnc.VncServer(display=self.DISPLAY)
server.initialize()
expected = "%s:%s (%s)" % (HOSTNAME, self.DISPLAY, IP)
self.assertEqual(server.connxinfo, expected)
def openlogfile_test(self):
import pyanaconda.vnc
FILE = 'file'
pyanaconda.vnc.os.O_RDWR = os.O_RDWR
pyanaconda.vnc.os.O_CREAT = os.O_CREAT
pyanaconda.vnc.os.open.return_value = FILE
server = pyanaconda.vnc.VncServer(log_file=self.LOG_FILE)
ret = server.openlogfile()
self.assertEqual(ret, FILE)
self.assertEqual(pyanaconda.vnc.os.open.call_args,
((self.LOG_FILE, os.O_RDWR | os.O_CREAT), {})
)
def connect_to_view_test(self):
import pyanaconda.vnc
pyanaconda.vnc.subprocess.Popen().communicate.return_value = (self.OK, '')
server = pyanaconda.vnc.VncServer(vncconnecthost=self.VNCCONNECTHOST)
ret = server.connectToView()
self.assertTrue(ret)
params = pyanaconda.vnc.subprocess.Popen.call_args[0][0]
self.assertTrue(self.VNCCONNECTHOST in params)
self.assertTrue(params[params.index(self.VNCCONNECTHOST)-1] == "-connect")
def start_server_test(self):
import pyanaconda.vnc
pyanaconda.vnc.VncServer.initialize = mock.Mock()
pyanaconda.vnc.VncServer.setVNCPassword = mock.Mock()
pyanaconda.vnc.VncServer.VNCListen = mock.Mock()
pyanaconda.vnc.subprocess.Popen().poll.return_value = None
pyanaconda.vnc.os.environ = {}
pyanaconda.vnc.time.sleep = mock.Mock()
server = pyanaconda.vnc.VncServer(root=self.ROOT, display=self.DISPLAY,
desktop=self.DESKTOP, password=self.PASS, vncconnecthost="")
server.openlogfile = mock.Mock()
server.startServer()
params = pyanaconda.vnc.subprocess.Popen.call_args[0][0]
self.assertTrue('desktop=%s'%self.DESKTOP in params)
self.assertTrue(':%s'%self.DISPLAY in params)
self.assertTrue(pyanaconda.vnc.VncServer.VNCListen.called)
self.assertTrue("DISPLAY" in pyanaconda.vnc.os.environ)
self.assertEqual(pyanaconda.vnc.os.environ['DISPLAY'], ":%s" % self.DISPLAY)
| gpl-2.0 | -2,935,803,873,079,089,000 | 35.625 | 84 | 0.631156 | false |
SNeuhausen/training_management | models/education_plan/education_plan.py | 1 | 7704 | # -*- coding: utf-8 -*-
# /#############################################################################
#
# Stephan Neuhausen.
# Copyright (C) 2014-TODAY Stephan Neuhausen iad.de.
#
# /#############################################################################
from datetime import date
from openerp import models, fields, api
from openerp.addons.training_management.models.model_names import ModelNames
from openerp.addons.training_management.models.table_names import TableNames
from openerp.addons.training_management.models.selections import ParticipationStateSelection
from openerp.addons.training_management.models.selections import ParticipationCompletionStates
from openerp.addons.training_management.models.selections import ParticipationJobStates
from openerp.addons.training_management.utils.date_utils import DateUtils
class EducationPlan(models.Model):
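    """An individual participant's education plan, linking the participant
    to a measure, an optional sponsor and the scheduled course plans."""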
_name = ModelNames.EDUCATION_PLAN
name = fields.Char(size=128, string='Titel', copy=False, required=True)
measure_id = fields.Many2one(
comodel_name=ModelNames.MEASURE,
string='Maßnahme',
required=True,
copy=True,
)
partner_id = fields.Many2one(
comodel_name=ModelNames.PARTNER,
required=True,
domain=[('is_participant', '=', True)],
string='Teilnehmer',
copy=True,
)
status = fields.Selection(
selection=ParticipationStateSelection.get_items(),
default=ParticipationStateSelection.INTERESTED,
string="Status",
copy=False,
)
is_active = fields.Boolean(string='Ist aktive Planung', default=False, copy=False)
customer_number = fields.Char(size=128, string='Kunden-Nr.', copy=True)
sponsor_id = fields.Many2one(
comodel_name=ModelNames.PARTNER,
domain=[('is_sponsor', '=', True), ('is_company', '=', True)],
string='Kostenträger',
copy=True,
)
sponsor_contact_id = fields.Many2one(
comodel_name=ModelNames.PARTNER,
domain=[('is_sponsor', '=', True), ('is_company', '=', False)],
string='Ansprechpartner',
copy=True,
)
completion_status = fields.Selection(
selection=ParticipationCompletionStates.get_items(),
string='Beendigungsgrund',
copy=False,
)
completion_comment = fields.Text(string='Anmerkung', copy=False)
job_status = fields.Selection(
selection=ParticipationJobStates.get_items(),
string='In Arbeit?',
copy=False,
)
job_checked_date = fields.Date(string='Kontakt vom', copy=False)
show_dates = fields.Boolean(string='keine Modultermine zeigen?')
show_lectures = fields.Boolean(string='nur Fachinhalte zeigen?')
teaching_type_id = fields.Many2one(
comodel_name=ModelNames.TEACHING_TYPE,
default=lambda self: self._default__teaching_type_id(),
string='Typ',
copy=False,
)
tag_ids = fields.Many2many(
comodel_name=ModelNames.TAG,
relation=TableNames.EDUCATION_PLAN__TAG,
column1="education_plan_id",
column2="tag_id",
string='Tags',
copy=True,
)
planning_date = fields.Date(
string='Planungsdatum',
required=True,
default=lambda self: fields.Date.today(),
copy=False,
)
start_date = fields.Date(string='Startdatum', copy=True)
end_date = fields.Date(string='Enddatum', copy=True)
school_day_count = fields.Integer(
string="Anzahl Unterrichtstage",
compute="_compute__school_day_count",
readonly=True,
store=True,
copy=False,
)
last_presence_date = fields.Date(string='Letzter Anwesenheitstag', copy=False)
plan_comment = fields.Text(string='Anmerkung', copy=False)
course_plan_ids = fields.One2many(
comodel_name=ModelNames.COURSE_PLAN,
inverse_name="education_plan_id",
string='Kursplanungen',
copy=True,
)
concrete_degree_ids = fields.One2many(
comodel_name=ModelNames.CONCRETE_DEGREE,
inverse_name="education_plan_id",
string=u"Abschlüsse",
copy=False,
)
enable_teaching_filter = fields.Boolean(default=True, string=u"Auswahl der Lehrinhalte filtern")
def _default__teaching_type_id(self):
return self.env[ModelNames.TEACHING_TYPE].get_course_type()
@api.depends(
'course_plan_ids.start_date', 'course_plan_ids.end_date',
'measure_id.special_day_ids.type', 'measure_id.special_day_ids.date'
)
def _compute__school_day_count(self):
for plan in self:
school_days = plan.compute_school_days()
plan.school_day_count = len(school_days)
def compute_school_days(self):
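        """Return the union of school days over all linked course plans."""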
self.ensure_one()
school_days = set()
for course_plan in self.course_plan_ids:
school_days |= course_plan.compute_school_days()
return school_days
@api.multi
def action_button__toggle_active_state(self):
for plan in self:
plan.is_active = not plan.is_active
@api.multi
def action_button__compute_dates_from_course_plans(self):
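        """Stretch the plan's start/end dates to span every course plan and
        seed last_presence_date from the end date when it is still empty."""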
convert = DateUtils.convert_to_date
new_start_date = convert(self.start_date, date.max)
new_end_date = convert(self.end_date, date.min)
for course_plan in self.course_plan_ids:
new_start_date = min(new_start_date, convert(course_plan.start_date, date.max))
new_end_date = max(new_end_date, convert(course_plan.end_date, date.min))
if new_start_date == date.max:
new_start_date = False
if new_end_date == date.min:
new_end_date = False
if not self.last_presence_date:
self.last_presence_date = new_end_date
self.start_date = new_start_date
self.end_date = new_end_date
@api.onchange('last_presence_date', 'start_date', 'end_date')
def _onchange__change_status_to_completed(self):
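        """Switch the status to alumni once the last presence day falls
        inside the plan's start/end range."""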
convert = DateUtils.convert_to_date
last_presence_date = convert(self.last_presence_date)
start_date = convert(self.start_date)
end_date = convert(self.end_date)
if not all([last_presence_date, start_date, end_date]):
return
if start_date <= last_presence_date <= end_date:
self.status = ParticipationStateSelection.ALUMNI
@api.model
def create(self, vals):
plan = super(EducationPlan, self).create(vals)
if plan.is_active:
plan.deactivate_other_plans()
return plan
@api.multi
def write(self, vals):
previous_active_states = dict((plan, plan.is_active) for plan in self)
result = super(EducationPlan, self).write(vals)
for plan in self:
is_active_now = plan.is_active
was_active_before = previous_active_states[plan]
if not was_active_before and is_active_now:
plan.deactivate_other_plans()
return result
def deactivate_other_plans(self):
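        """Keep at most one active education plan per participant."""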
self.ensure_one()
other_plans = self.get_other_plans()
for other_plan in other_plans:
if other_plan.is_active:
other_plan.is_active = False
def get_other_plans(self):
self.ensure_one()
participant = self.partner_id
if participant:
other_plans = participant.education_plan_ids - self
return other_plans
else:
return self.browse()
@api.multi
def copy(self, default=None):
self.ensure_one()
default = default or {}
default["name"] = u"{name} (Kopie)".format(name=self.name)
return super(EducationPlan, self).copy(default=default)
| gpl-3.0 | -1,053,912,573,318,107,900 | 35.15493 | 100 | 0.623166 | false |
jezdez/kuma | vendor/packages/translate/convert/po2wordfast.py | 25 | 4112 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2005-2007 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert Gettext PO localization files to a Wordfast translation memory file.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/po2wordfast.html
for examples and usage instructions.
"""
import os
from translate.convert import convert
from translate.misc import wStringIO
from translate.storage import po, wordfast
class po2wordfast:
def convertfiles(self, inputfile, wffile, sourcelanguage='en', targetlanguage=None):
"""converts a .po file (possibly many) to a Wordfast TM file"""
inputstore = po.pofile(inputfile)
for inunit in inputstore.units:
if inunit.isheader() or inunit.isblank() or not inunit.istranslated():
continue
source = inunit.source
target = inunit.target
newunit = wffile.addsourceunit(source)
newunit.target = target
newunit.targetlang = targetlanguage
def convertpo(inputfile, outputfile, templatefile, sourcelanguage='en', targetlanguage=None):
"""reads in stdin using fromfileclass, converts using convertorclass, writes to stdout"""
convertor = po2wordfast()
outputfile.wffile.header.targetlang = targetlanguage
convertor.convertfiles(inputfile, outputfile.wffile, sourcelanguage, targetlanguage)
return 1
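# Hedged usage sketch (file names and language code are illustrative): running
# "po2wordfast -l af-ZA project.po tm.txt" drives main() below, converting
# project.po into the Wordfast TM file tm.txt.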
class wfmultifile:
def __init__(self, filename, mode=None):
"""initialises wfmultifile from a seekable inputfile or writable outputfile"""
self.filename = filename
if mode is None:
if os.path.exists(filename):
mode = 'r'
else:
mode = 'w'
self.mode = mode
self.multifilename = os.path.splitext(filename)[0]
self.wffile = wordfast.WordfastTMFile()
def openoutputfile(self, subfile):
"""returns a pseudo-file object for the given subfile"""
def onclose(contents):
pass
outputfile = wStringIO.CatchStringOutput(onclose)
outputfile.filename = subfile
outputfile.wffile = self.wffile
return outputfile
class WfOptionParser(convert.ArchiveConvertOptionParser):
def recursiveprocess(self, options):
if not options.targetlanguage:
raise ValueError("You must specify the target language")
super(WfOptionParser, self).recursiveprocess(options)
self.output = open(options.output, 'w')
#options.outputarchive.wffile.setsourcelanguage(options.sourcelanguage)
self.output.write(str(options.outputarchive.wffile))
def main(argv=None):
formats = {"po": ("txt", convertpo), ("po", "txt"): ("txt", convertpo)}
archiveformats = {(None, "output"): wfmultifile, (None, "template"): wfmultifile}
parser = WfOptionParser(formats, usepots=False, usetemplates=False, description=__doc__, archiveformats=archiveformats)
parser.add_option("-l", "--language", dest="targetlanguage", default=None,
help="set target language code (e.g. af-ZA) [required]", metavar="LANG")
parser.add_option("", "--source-language", dest="sourcelanguage", default='en',
help="set source language code (default: en)", metavar="LANG")
parser.passthrough.append("sourcelanguage")
parser.passthrough.append("targetlanguage")
parser.run(argv)
if __name__ == '__main__':
main()
| mpl-2.0 | -9,140,157,742,357,399,000 | 37.429907 | 123 | 0.687257 | false |
dongjoon-hyun/spark | python/pyspark/ml/__init__.py | 25 | 1530 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
DataFrame-based machine learning APIs to let users quickly assemble and configure practical
machine learning pipelines.
"""
from pyspark.ml.base import Estimator, Model, Predictor, PredictionModel, \
Transformer, UnaryTransformer
from pyspark.ml.pipeline import Pipeline, PipelineModel
from pyspark.ml import classification, clustering, evaluation, feature, fpm, \
image, recommendation, regression, stat, tuning, util, linalg, param
__all__ = [
"Transformer", "UnaryTransformer", "Estimator", "Model",
"Predictor", "PredictionModel", "Pipeline", "PipelineModel",
"classification", "clustering", "evaluation", "feature", "fpm", "image",
"recommendation", "regression", "stat", "tuning", "util", "linalg", "param",
]
| apache-2.0 | -898,271,769,885,563,500 | 45.363636 | 91 | 0.751634 | false |
waidyanatha/sambro-eden | private/templates/EUROSHA/config.py | 2 | 19304 | # -*- coding: utf-8 -*-
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import current
from gluon.storage import Storage
from s3.s3forms import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineComponentCheckbox
settings = current.deployment_settings
T = current.T
"""
Template settings for EUROSHA: European Open Source Humanitarian Aid
"""
# Pre-Populate
settings.base.prepopulate = ["EUROSHA"]
settings.base.system_name = T("EUROSHA Humanitarian Data Registry")
settings.base.system_name_short = T("EUROSHA")
# Theme (folder to use for views/layout.html)
settings.base.theme = "EUROSHA"
# Auth settings
# Do new users need to verify their email address?
settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
settings.auth.registration_requires_approval = True
# Uncomment this to request the Organisation when a user registers
settings.auth.registration_requests_organisation = True
settings.auth.role_modules = OrderedDict([
("transport", "Airports and Seaports"),
("hms", "Hospitals"),
("org", "Organizations, Offices, and Facilities"),
("inv", "Warehouses"),
("staff", "Staff"),
("vol", "Volunteers"),
("project", "Projects"),
#("asset", "Assets"),
#("vehicle", "Vehicles"),
])
# L10n settings
settings.L10n.languages = OrderedDict([
("en", "English"),
("fr", "French"),
])
# Default timezone for users
settings.L10n.utc_offset = "UTC +0100"
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Finance settings
settings.fin.currencies = {
"EUR" : T("Euros"),
"GBP" : T("Great British Pounds"),
"USD" : T("United States Dollars"),
}
# Security Policy
settings.security.policy = 8 # Delegations
settings.security.map = True
# Realm Entity (old)
#def eurosha_realm_entity(table, row):
# user = current.auth.user
# if user is not None:
# return current.s3db.pr_get_pe_id("org_organisation",
# user.organisation_id)
# else:
# return None
#settings.auth.realm_entity = eurosha_realm_entity
def eurosha_realm_entity(table, row):
"""
Assign a Realm Entity to records
"""
tablename = table._tablename
# Do not apply realms for Master Data
# @ToDo: Restore Realms and add a role/functionality support for Master Data
if tablename in [#"hrm_certificate",
"hrm_department",
"hrm_job_title",
"hrm_course",
"hrm_programme",
]:
return None
db = current.db
s3db = current.s3db
# Entity reference fields
EID = "pe_id"
#OID = "organisation_id"
SID = "site_id"
#GID = "group_id"
PID = "person_id"
# Owner Entity Foreign Key
realm_entity_fks = dict(pr_contact = EID,
pr_physical_description = EID,
pr_address = EID,
pr_image = EID,
pr_identity = PID,
pr_education = PID,
pr_note = PID,
hrm_human_resource = SID,
inv_recv = SID,
inv_recv_item = "req_id",
inv_send = SID,
inv_track_item = "track_org_id",
inv_adj_item = "adj_id",
req_req_item = "req_id"
)
# Default Foreign Keys (ordered by priority)
default_fks = ["catalog_id",
"project_id",
"project_location_id"
]
# Link Tables
realm_entity_link_table = dict(
project_task = Storage(tablename = "project_task_project",
link_key = "task_id"
)
)
if tablename in realm_entity_link_table:
# Replace row with the record from the link table
link_table = realm_entity_link_table[tablename]
table = s3db[link_table.tablename]
rows = db(table[link_table.link_key] == row.id).select(table.id,
limitby=(0, 1))
if rows:
# Update not Create
row = rows.first()
# Check if there is a FK to inherit the realm_entity
realm_entity = 0
fk = realm_entity_fks.get(tablename, None)
for default_fk in [fk] + default_fks:
if default_fk in table.fields:
fk = default_fk
# Inherit realm_entity from parent record
if fk == EID:
ftable = s3db.pr_person
query = ftable[EID] == row[EID]
else:
ftablename = table[fk].type[10:] # reference tablename
ftable = s3db[ftablename]
query = (table.id == row.id) & \
(table[fk] == ftable.id)
record = db(query).select(ftable.realm_entity,
limitby=(0, 1)).first()
if record:
realm_entity = record.realm_entity
break
#else:
# Continue to loop through the rest of the default_fks
# Fall back to default get_realm_entity function
    # EUROSHA should never use User organisation (since volunteers edit on behalf of other Orgs)
#use_user_organisation = False
## Suppliers & Partners are owned by the user's organisation
#if realm_entity == 0 and tablename == "org_organisation":
# ott = s3db.org_organisation_type
# row = table[row.id]
# row = db(table.organisation_type_id == ott.id).select(ott.name,
# limitby=(0, 1)
# ).first()
#
# if row and row.name != "Red Cross / Red Crescent":
# use_user_organisation = True
## Groups are owned by the user's organisation
#elif tablename in ["pr_group"]:
# use_user_organisation = True
#user = current.auth.user
#if use_user_organisation and user:
# # @ToDo - this might cause issues if the user's org is different from the realm that gave them permissions to create the Org
# realm_entity = s3db.pr_get_pe_id("org_organisation",
# user.organisation_id)
return realm_entity
settings.auth.realm_entity = eurosha_realm_entity
# Set this if there will be multiple areas in which work is being done,
# and a menu to select among them is wanted.
settings.gis.menu = "Country"
# PoIs to export in KML/OSM feeds from Admin locations
settings.gis.poi_resources = ["cr_shelter", "hms_hospital", "org_office",
"transport_airport", "transport_seaport"
]
# Enable this for a UN-style deployment
settings.ui.cluster = True
settings.frontpage.rss = [
{"title": "Blog",
"url": "http://eurosha-volunteers-blog.org/feed/"
}
]
# Organisation Management
# Uncomment to add summary fields for Organisations/Offices for # National/International staff
settings.org.summary = True
# HRM
# Uncomment to allow HRs to have multiple Job Titles
settings.hrm.multiple_job_titles = True
# Uncomment to disable Staff experience
settings.hrm.staff_experience = False
# Uncomment to disable Volunteer experience
settings.hrm.vol_experience = False
# Uncomment to disable the use of HR Certificates
settings.hrm.use_certificates = False
# Uncomment to disable the use of HR Credentials
settings.hrm.use_credentials = False
# Uncomment to disable the use of HR Description
settings.hrm.use_description = False
# Uncomment to disable the use of HR ID
settings.hrm.use_id = False
# Uncomment to disable the use of HR Skills
settings.hrm.use_skills = False
# Uncomment to disable the use of HR Trainings
settings.hrm.use_trainings = False
# Projects
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
settings.project.mode_3w = True
# Uncomment this to use Codes for projects
settings.project.codes = True
# Uncomment this to call project locations 'Communities'
#settings.project.community = True
# Uncomment this to use multiple Budgets per project
settings.project.multiple_budgets = True
# Uncomment this to use multiple Organisations per project
settings.project.multiple_organisations = True
# Uncomment this to customise
#settings.project.organisation_roles = {
# 1: T("Host National Society"),
# 2: T("Partner National Society"),
# 3: T("Donor"),
# #4: T("Customer"), # T("Beneficiary")?
# 5: T("Partner")
#}
# -----------------------------------------------------------------------------
def customize_org_organisation(**attr):
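    """Customise the org_organisation controller: a compact list view and a
    CRUD form that exposes clusters (sectors) as inline checkboxes."""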
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.interactive or r.representation.lower() == "aadata":
s3db = current.s3db
list_fields = ["id",
"name",
"acronym",
"organisation_type_id",
(T("Clusters"), "sector.name"),
"country",
"website"
]
s3db.configure("org_organisation", list_fields=list_fields)
if r.interactive:
crud_form = S3SQLCustomForm(
"name",
"acronym",
"organisation_type_id",
"region",
"country",
S3SQLInlineComponentCheckbox(
"sector",
label = T("Clusters"),
field = "sector_id",
cols = 3,
),
"phone",
"website",
"year",
"logo",
"comments",
)
s3db.configure("org_organisation", crud_form=crud_form)
return result
s3.prep = custom_prep
return attr
settings.ui.customize_org_organisation = customize_org_organisation
# -----------------------------------------------------------------------------
settings.ui.crud_form_project_project = S3SQLCustomForm(
"organisation_id",
"name",
"code",
"description",
"status_id",
"start_date",
"end_date",
#S3SQLInlineComponentCheckbox(
# "hazard",
# label = T("Hazards"),
# field = "hazard_id",
# cols = 4,
#),
S3SQLInlineComponentCheckbox(
"sector",
label = T("Sectors"),
field = "sector_id",
cols = 4,
),
#S3SQLInlineComponent(
# "location",
# label = T("Locations"),
# fields = ["location_id"],
#),
S3SQLInlineComponentCheckbox(
"theme",
label = T("Themes"),
field = "theme_id",
cols = 4,
# Filter Theme by Sector
# filter = {"linktable": "project_theme_sector",
# "lkey": "theme_id",
# "rkey": "sector_id",
# },
# script = '''
#S3OptionsFilter({
# 'triggerName':'defaultsector-sector_id',
# 'targetName':'defaulttheme-theme_id',
# 'targetWidget':'defaulttheme-theme_id_widget',
# 'lookupResource':'theme',
# 'lookupURL':S3.Ap.concat('/project/theme_sector_widget?sector_ids='),
# 'getWidgetHTML':true,
# 'showEmptyField':false
#})'''
),
#"drr.hfa",
"objectives",
"human_resource_id",
# Partner Orgs
#S3SQLInlineComponent(
# "organisation",
# name = "partner",
# label = T("Partner Organizations"),
# fields = ["organisation_id",
# "comments",
# ],
# filterby = dict(field = "role",
# options = "2"
# )
#),
# Donors
#S3SQLInlineComponent(
# "organisation",
# name = "donor",
# label = T("Donor(s)"),
# fields = ["organisation_id",
# "amount",
# "currency"],
# filterby = dict(field = "role",
# options = "3"
# )
#),
#"budget",
#"currency",
"comments",
)
settings.ui.crud_form_project_location = S3SQLCustomForm(
"project_id",
"location_id",
# @ToDo: Grouped Checkboxes
S3SQLInlineComponentCheckbox(
"activity_type",
label = T("Activity Types"),
field = "activity_type_id",
cols = 3,
# Filter Activity Type by Sector
#filter = {"linktable": "project_activity_type_sector",
# "lkey": "activity_type_id",
# "rkey": "sector_id",
# "lookuptable": "project_project",
# "lookupkey": "project_id",
# },
),
"comments",
)
# Comment/uncomment modules here to disable/enable them
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = T("Home"),
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
#description = "Needed for Breadcrumbs",
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("translate", Storage(
name_nice = T("Translation Functionality"),
#description = "Selective translation of strings based on module.",
module_type = None,
)),
("gis", Storage(
name_nice = T("Map"),
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
module_type = 1,
)),
("pr", Storage(
name_nice = T("Person Registry"),
#description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = None
)),
("org", Storage(
name_nice = T("Organizations"),
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
module_type = 2
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Staff"),
#description = "Human Resources Management",
restricted = True,
module_type = None,
)),
("cms", Storage(
name_nice = T("Content Management"),
#description = "Content Management System",
restricted = True,
module_type = 10,
)),
("doc", Storage(
name_nice = T("Documents"),
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
module_type = 10,
)),
("msg", Storage(
name_nice = T("Messaging"),
#description = "Sends & Receives Alerts via Email & SMS",
restricted = True,
# The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
#description = "Used within Inventory Management, Request Management and Asset Management",
restricted = True,
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
restricted = True,
module_type = 4
)),
#("asset", Storage(
# name_nice = T("Assets"),
# #description = "Recording and Assigning Assets",
# restricted = True,
# module_type = 5,
# )),
# Vehicle depends on Assets
#("vehicle", Storage(
# name_nice = T("Vehicles"),
# #description = "Manage Vehicles",
# restricted = True,
# module_type = 6,
# )),
("project", Storage(
name_nice = T("Projects"),
#description = "Tracking of Projects, Activities and Tasks",
restricted = True,
module_type = 7
)),
("cr", Storage(
name_nice = T("Shelters"),
#description = "Tracks the location, capacity and breakdown of victims in Shelters",
restricted = True,
module_type = 10
)),
("hms", Storage(
name_nice = T("Hospitals"),
#description = "Helps to monitor status of hospitals",
restricted = True,
module_type = 3
)),
("transport", Storage(
name_nice = T("Transport"),
restricted = True,
module_type = 10,
)),
("stats", Storage(
name_nice = "Stats",
#description = "Needed for Project Benficiaries",
restricted = True,
module_type = None
)),
])
| mit | -822,010,378,834,629,800 | 33.781982 | 147 | 0.53476 | false |
TaintTrap/platform_external_chromium | net/tools/testserver/xmppserver.py | 67 | 16907 | #!/usr/bin/python2.4
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A bare-bones and non-compliant XMPP server.
Just enough of the protocol is implemented to get it to work with
Chrome's sync notification system.
"""
import asynchat
import asyncore
import base64
import re
import socket
from xml.dom import minidom
# pychecker complains about the use of fileno(), which is implemented
# by asyncore by forwarding to an internal object via __getattr__.
__pychecker__ = 'no-classattr'
class Error(Exception):
"""Error class for this module."""
pass
class UnexpectedXml(Error):
"""Raised when an unexpected XML element has been encountered."""
def __init__(self, xml_element):
xml_text = xml_element.toxml()
Error.__init__(self, 'Unexpected XML element', xml_text)
def ParseXml(xml_string):
"""Parses the given string as XML and returns a minidom element
object.
"""
dom = minidom.parseString(xml_string)
# minidom handles xmlns specially, but there's a bug where it sets
# the attribute value to None, which causes toxml() or toprettyxml()
# to break.
def FixMinidomXmlnsBug(xml_element):
if xml_element.getAttribute('xmlns') is None:
xml_element.setAttribute('xmlns', '')
def ApplyToAllDescendantElements(xml_element, fn):
fn(xml_element)
for node in xml_element.childNodes:
if node.nodeType == node.ELEMENT_NODE:
ApplyToAllDescendantElements(node, fn)
root = dom.documentElement
ApplyToAllDescendantElements(root, FixMinidomXmlnsBug)
return root
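# Rough illustration (input is hypothetical): ParseXml('<iq id="1"/>') returns
# the <iq> document element ready for serialization; the FixMinidomXmlnsBug
# walk above is what keeps toxml()/toprettyxml() from breaking on xmlns.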
def CloneXml(xml):
"""Returns a deep copy of the given XML element.
Args:
xml: The XML element, which should be something returned from
ParseXml() (i.e., a root element).
"""
return xml.ownerDocument.cloneNode(True).documentElement
class StanzaParser(object):
"""A hacky incremental XML parser.
StanzaParser consumes data incrementally via FeedString() and feeds
its delegate complete parsed stanzas (i.e., XML documents) via
FeedStanza(). Any stanzas passed to FeedStanza() are unlinked after
the callback is done.
Use like so:
class MyClass(object):
...
def __init__(self, ...):
...
self._parser = StanzaParser(self)
...
def SomeFunction(self, ...):
...
self._parser.FeedString(some_data)
...
def FeedStanza(self, stanza):
...
print stanza.toprettyxml()
...
"""
# NOTE(akalin): The following regexps are naive, but necessary since
# none of the existing Python 2.4/2.5 XML libraries support
# incremental parsing. This works well enough for our purposes.
#
# The regexps below assume that any present XML element starts at
# the beginning of the string, but there may be trailing whitespace.
# Matches an opening stream tag (e.g., '<stream:stream foo="bar">')
# (assumes that the stream XML namespace is defined in the tag).
_stream_re = re.compile(r'^(<stream:stream [^>]*>)\s*')
# Matches an empty element tag (e.g., '<foo bar="baz"/>').
_empty_element_re = re.compile(r'^(<[^>]*/>)\s*')
# Matches a non-empty element (e.g., '<foo bar="baz">quux</foo>').
# Does *not* handle nested elements.
_non_empty_element_re = re.compile(r'^(<([^ >]*)[^>]*>.*?</\2>)\s*')
# The closing tag for a stream tag. We have to insert this
# ourselves since all XML stanzas are children of the stream tag,
# which is never closed until the connection is closed.
_stream_suffix = '</stream:stream>'
def __init__(self, delegate):
self._buffer = ''
self._delegate = delegate
def FeedString(self, data):
"""Consumes the given string data, possibly feeding one or more
stanzas to the delegate.
"""
self._buffer += data
while (self._ProcessBuffer(self._stream_re, self._stream_suffix) or
self._ProcessBuffer(self._empty_element_re) or
self._ProcessBuffer(self._non_empty_element_re)):
pass
def _ProcessBuffer(self, regexp, xml_suffix=''):
"""If the buffer matches the given regexp, removes the match from
the buffer, appends the given suffix, parses it, and feeds it to
the delegate.
Returns:
Whether or not the buffer matched the given regexp.
"""
results = regexp.match(self._buffer)
if not results:
return False
xml_text = self._buffer[:results.end()] + xml_suffix
self._buffer = self._buffer[results.end():]
stanza = ParseXml(xml_text)
self._delegate.FeedStanza(stanza)
# Needed because stanza may have cycles.
stanza.unlink()
return True
class Jid(object):
"""Simple struct for an XMPP jid (essentially an e-mail address with
an optional resource string).
"""
def __init__(self, username, domain, resource=''):
self.username = username
self.domain = domain
self.resource = resource
def __str__(self):
jid_str = "%s@%s" % (self.username, self.domain)
if self.resource:
jid_str += '/' + self.resource
return jid_str
def GetBareJid(self):
return Jid(self.username, self.domain)
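# For example, str(Jid('alice', 'example.com', 'res')) is
# 'alice@example.com/res', and GetBareJid() drops the '/res' resource part.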
class IdGenerator(object):
"""Simple class to generate unique IDs for XMPP messages."""
def __init__(self, prefix):
self._prefix = prefix
self._id = 0
def GetNextId(self):
next_id = "%s.%s" % (self._prefix, self._id)
self._id += 1
return next_id
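# For example, IdGenerator('client').GetNextId() returns 'client.0', then
# 'client.1', and so on -- IDs are unique per generator instance.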
class HandshakeTask(object):
"""Class to handle the initial handshake with a connected XMPP
client.
"""
# The handshake states in order.
(_INITIAL_STREAM_NEEDED,
_AUTH_NEEDED,
_AUTH_STREAM_NEEDED,
_BIND_NEEDED,
_SESSION_NEEDED,
_FINISHED) = range(6)
# Used when in the _INITIAL_STREAM_NEEDED and _AUTH_STREAM_NEEDED
# states. Not an XML object as it's only the opening tag.
#
# The from and id attributes are filled in later.
_STREAM_DATA = (
'<stream:stream from="%s" id="%s" '
'version="1.0" xmlns:stream="http://etherx.jabber.org/streams" '
'xmlns="jabber:client">')
# Used when in the _INITIAL_STREAM_NEEDED state.
_AUTH_STANZA = ParseXml(
'<stream:features xmlns:stream="http://etherx.jabber.org/streams">'
' <mechanisms xmlns="urn:ietf:params:xml:ns:xmpp-sasl">'
' <mechanism>PLAIN</mechanism>'
' <mechanism>X-GOOGLE-TOKEN</mechanism>'
' </mechanisms>'
'</stream:features>')
# Used when in the _AUTH_NEEDED state.
_AUTH_SUCCESS_STANZA = ParseXml(
'<success xmlns="urn:ietf:params:xml:ns:xmpp-sasl"/>')
# Used when in the _AUTH_STREAM_NEEDED state.
_BIND_STANZA = ParseXml(
'<stream:features xmlns:stream="http://etherx.jabber.org/streams">'
' <bind xmlns="urn:ietf:params:xml:ns:xmpp-bind"/>'
' <session xmlns="urn:ietf:params:xml:ns:xmpp-session"/>'
'</stream:features>')
# Used when in the _BIND_NEEDED state.
#
# The id and jid attributes are filled in later.
_BIND_RESULT_STANZA = ParseXml(
'<iq id="" type="result">'
' <bind xmlns="urn:ietf:params:xml:ns:xmpp-bind">'
' <jid/>'
' </bind>'
'</iq>')
# Used when in the _SESSION_NEEDED state.
#
# The id attribute is filled in later.
_IQ_RESPONSE_STANZA = ParseXml('<iq id="" type="result"/>')
def __init__(self, connection, resource_prefix):
self._connection = connection
self._id_generator = IdGenerator(resource_prefix)
self._username = ''
self._domain = ''
self._jid = None
self._resource_prefix = resource_prefix
self._state = self._INITIAL_STREAM_NEEDED
def FeedStanza(self, stanza):
"""Inspects the given stanza and changes the handshake state if needed.
Called when a stanza is received from the client. Inspects the
stanza to make sure it has the expected attributes given the
current state, advances the state if needed, and sends a reply to
the client if needed.
"""
def ExpectStanza(stanza, name):
if stanza.tagName != name:
raise UnexpectedXml(stanza)
def ExpectIq(stanza, type, name):
ExpectStanza(stanza, 'iq')
if (stanza.getAttribute('type') != type or
stanza.firstChild.tagName != name):
raise UnexpectedXml(stanza)
def GetStanzaId(stanza):
return stanza.getAttribute('id')
def HandleStream(stanza):
ExpectStanza(stanza, 'stream:stream')
domain = stanza.getAttribute('to')
if domain:
self._domain = domain
SendStreamData()
def SendStreamData():
next_id = self._id_generator.GetNextId()
stream_data = self._STREAM_DATA % (self._domain, next_id)
self._connection.SendData(stream_data)
def GetUserDomain(stanza):
encoded_username_password = stanza.firstChild.data
username_password = base64.b64decode(encoded_username_password)
(_, username_domain, _) = username_password.split('\0')
# The domain may be omitted.
#
# If we were using python 2.5, we'd be able to do:
#
# username, _, domain = username_domain.partition('@')
# if not domain:
# domain = self._domain
at_pos = username_domain.find('@')
if at_pos != -1:
username = username_domain[:at_pos]
domain = username_domain[at_pos+1:]
else:
username = username_domain
domain = self._domain
return (username, domain)
if self._state == self._INITIAL_STREAM_NEEDED:
HandleStream(stanza)
self._connection.SendStanza(self._AUTH_STANZA, False)
self._state = self._AUTH_NEEDED
elif self._state == self._AUTH_NEEDED:
ExpectStanza(stanza, 'auth')
(self._username, self._domain) = GetUserDomain(stanza)
self._connection.SendStanza(self._AUTH_SUCCESS_STANZA, False)
self._state = self._AUTH_STREAM_NEEDED
elif self._state == self._AUTH_STREAM_NEEDED:
HandleStream(stanza)
self._connection.SendStanza(self._BIND_STANZA, False)
self._state = self._BIND_NEEDED
elif self._state == self._BIND_NEEDED:
ExpectIq(stanza, 'set', 'bind')
stanza_id = GetStanzaId(stanza)
resource_element = stanza.getElementsByTagName('resource')[0]
resource = resource_element.firstChild.data
full_resource = '%s.%s' % (self._resource_prefix, resource)
response = CloneXml(self._BIND_RESULT_STANZA)
response.setAttribute('id', stanza_id)
self._jid = Jid(self._username, self._domain, full_resource)
jid_text = response.parentNode.createTextNode(str(self._jid))
response.getElementsByTagName('jid')[0].appendChild(jid_text)
self._connection.SendStanza(response)
self._state = self._SESSION_NEEDED
elif self._state == self._SESSION_NEEDED:
ExpectIq(stanza, 'set', 'session')
stanza_id = GetStanzaId(stanza)
xml = CloneXml(self._IQ_RESPONSE_STANZA)
xml.setAttribute('id', stanza_id)
self._connection.SendStanza(xml)
self._state = self._FINISHED
self._connection.HandshakeDone(self._jid)
def AddrString(addr):
return '%s:%d' % addr
class XmppConnection(asynchat.async_chat):
"""A single XMPP client connection.
This class handles the connection to a single XMPP client (via a
socket). It does the XMPP handshake and also implements the (old)
Google notification protocol.
"""
# Used for acknowledgements to the client.
#
# The from and id attributes are filled in later.
_IQ_RESPONSE_STANZA = ParseXml('<iq from="" id="" type="result"/>')
def __init__(self, sock, socket_map, delegate, addr):
"""Starts up the xmpp connection.
Args:
sock: The socket to the client.
socket_map: A map from sockets to their owning objects.
delegate: The delegate, which is notified when the XMPP
handshake is successful, when the connection is closed, and
when a notification has to be broadcast.
addr: The host/port of the client.
"""
# We do this because in versions of python < 2.6,
# async_chat.__init__ doesn't take a map argument nor pass it to
# dispatcher.__init__. We rely on the fact that
# async_chat.__init__ calls dispatcher.__init__ as the last thing
# it does, and that calling dispatcher.__init__ with socket=None
# and map=None is essentially a no-op.
asynchat.async_chat.__init__(self)
asyncore.dispatcher.__init__(self, sock, socket_map)
self.set_terminator(None)
self._delegate = delegate
self._parser = StanzaParser(self)
self._jid = None
self._addr = addr
addr_str = AddrString(self._addr)
self._handshake_task = HandshakeTask(self, addr_str)
print 'Starting connection to %s' % self
def __str__(self):
if self._jid:
return str(self._jid)
else:
return AddrString(self._addr)
# async_chat implementation.
def collect_incoming_data(self, data):
self._parser.FeedString(data)
# This is only here to make pychecker happy.
def found_terminator(self):
asynchat.async_chat.found_terminator(self)
def close(self):
print "Closing connection to %s" % self
self._delegate.OnXmppConnectionClosed(self)
asynchat.async_chat.close(self)
# Called by self._parser.FeedString().
def FeedStanza(self, stanza):
if self._handshake_task:
self._handshake_task.FeedStanza(stanza)
elif stanza.tagName == 'iq' and stanza.getAttribute('type') == 'result':
# Ignore all client acks.
pass
elif (stanza.firstChild and
stanza.firstChild.namespaceURI == 'google:push'):
self._HandlePushCommand(stanza)
else:
raise UnexpectedXml(stanza)
# Called by self._handshake_task.
def HandshakeDone(self, jid):
self._jid = jid
self._handshake_task = None
self._delegate.OnXmppHandshakeDone(self)
print "Handshake done for %s" % self
def _HandlePushCommand(self, stanza):
if stanza.tagName == 'iq' and stanza.firstChild.tagName == 'subscribe':
# Subscription request.
self._SendIqResponseStanza(stanza)
elif stanza.tagName == 'message' and stanza.firstChild.tagName == 'push':
# Send notification request.
self._delegate.ForwardNotification(self, stanza)
else:
      raise UnexpectedXml(stanza)
def _SendIqResponseStanza(self, iq):
stanza = CloneXml(self._IQ_RESPONSE_STANZA)
stanza.setAttribute('from', str(self._jid.GetBareJid()))
stanza.setAttribute('id', iq.getAttribute('id'))
self.SendStanza(stanza)
def SendStanza(self, stanza, unlink=True):
"""Sends a stanza to the client.
Args:
stanza: The stanza to send.
unlink: Whether to unlink stanza after sending it. (Pass in
False if stanza is a constant.)
"""
self.SendData(stanza.toxml())
if unlink:
stanza.unlink()
def SendData(self, data):
"""Sends raw data to the client.
"""
# We explicitly encode to ascii as that is what the client expects
# (some minidom library functions return unicode strings).
self.push(data.encode('ascii'))
def ForwardNotification(self, notification_stanza):
"""Forwards a notification to the client."""
notification_stanza.setAttribute('from', str(self._jid.GetBareJid()))
notification_stanza.setAttribute('to', str(self._jid))
self.SendStanza(notification_stanza, False)
class XmppServer(asyncore.dispatcher):
"""The main XMPP server class.
The XMPP server starts accepting connections on the given address
and spawns off XmppConnection objects for each one.
Use like so:
socket_map = {}
xmpp_server = xmppserver.XmppServer(socket_map, ('127.0.0.1', 5222))
asyncore.loop(30.0, False, socket_map)
"""
def __init__(self, socket_map, addr):
asyncore.dispatcher.__init__(self, None, socket_map)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind(addr)
self.listen(5)
self._socket_map = socket_map
self._connections = set()
self._handshake_done_connections = set()
def handle_accept(self):
(sock, addr) = self.accept()
xmpp_connection = XmppConnection(sock, self._socket_map, self, addr)
self._connections.add(xmpp_connection)
def close(self):
# A copy is necessary since calling close on each connection
# removes it from self._connections.
for connection in self._connections.copy():
connection.close()
asyncore.dispatcher.close(self)
# XmppConnection delegate methods.
def OnXmppHandshakeDone(self, xmpp_connection):
self._handshake_done_connections.add(xmpp_connection)
def OnXmppConnectionClosed(self, xmpp_connection):
self._connections.discard(xmpp_connection)
self._handshake_done_connections.discard(xmpp_connection)
def ForwardNotification(self, unused_xmpp_connection, notification_stanza):
for connection in self._handshake_done_connections:
print 'Sending notification to %s' % connection
connection.ForwardNotification(notification_stanza)
| bsd-3-clause | -2,555,677,848,919,703,000 | 31.265267 | 77 | 0.66919 | false |
iandees/all-the-places | locations/spiders/lifetime.py | 1 | 1351 | # -*- coding: utf-8 -*-
import scrapy
import json
from locations.items import GeojsonPointItem
class LifetimeFitnessSpider(scrapy.Spider):
name = "lifetimefitness"
allowed_domains = ['lifetime.life']
start_urls = (
'https://www.lifetime.life/view-all-locations.html',
)
def parse(self, response):
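        """Request each location detail page linked from the index page."""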
response.selector.remove_namespaces()
city_urls = response.xpath('//a[@class="btn btn-link btn-xs m-b-sm p-x-0 b-x-xs-0 pull-xs-right"]/@href').extract()
for path in city_urls:
yield scrapy.Request(
"https://www.lifetime.life" + path.strip(),
callback=self.parse_store,
)
def parse_store(self, response):
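        """Build a GeojsonPointItem from the page's embedded ld+json data."""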
json_data = response.xpath('//script[@type="application/ld+json"]/text()').extract_first()
data = json.loads(json_data)
properties = {
'name': data['name'],
'ref': data['name'],
'addr_full': data['address']['streetAddress'],
'city': data['address']['addressLocality'],
'state': data['address']['addressRegion'],
'postcode': data['address']['postalCode'],
'phone': data['telephone'],
'website': data['url'],
'lat': data['geo']['latitude'],
'lon': data['geo']['longitude'],
}
        yield GeojsonPointItem(**properties)
| mit | -7,733,758,944,532,887,000 | 31.97561 | 123 | 0.57809 | false |
GuillaumeGomez/servo | tests/wpt/web-platform-tests/webdriver/support/asserts.py | 23 | 2715 | # WebDriver specification ID: dfn-error-response-data
errors = {
"element click intercepted": 400,
"element not selectable": 400,
"element not interactable": 400,
"insecure certificate": 400,
"invalid argument": 400,
"invalid cookie domain": 400,
"invalid coordinates": 400,
"invalid element state": 400,
"invalid selector": 400,
"invalid session id": 404,
"javascript error": 500,
"move target out of bounds": 500,
"no such alert": 400,
"no such cookie": 404,
"no such element": 404,
"no such frame": 400,
"no such window": 400,
"script timeout": 408,
"session not created": 500,
"stale element reference": 400,
"timeout": 408,
"unable to set cookie": 500,
"unable to capture screen": 500,
"unexpected alert open": 500,
"unknown command": 404,
"unknown error": 500,
"unknown method": 405,
"unsupported operation": 500,
}
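# For example, per the table above a "no such element" failure must arrive as
# an HTTP 404 whose JSON body carries value.error == "no such element"; the
# assert_error helper below checks exactly that pairing.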
# WebDriver specification ID: dfn-send-an-error
#
# > When required to send an error, with error code, a remote end must run the
# > following steps:
# >
# > 1. Let http status and name be the error response data for error code.
# > 2. Let message be an implementation-defined string containing a
# > human-readable description of the reason for the error.
# > 3. Let stacktrace be an implementation-defined string containing a stack
# > trace report of the active stack frames at the time when the error
# > occurred.
# > 4. Let data be a new JSON Object initialised with the following properties:
# >
# > error
# > name
# > message
# > message
# > stacktrace
# > stacktrace
# >
# > 5. Send a response with status and data as arguments.
def assert_error(response, error_code):
"""Verify that the provided wdclient.Response instance described a valid
error response as defined by `dfn-send-an-error` and the provided error
code.
:param response: wdclient.Response instance
:param error_code: string value of the expected "error code"
"""
assert response.status == errors[error_code]
assert "value" in response.body
assert response.body["value"]["error"] == error_code
assert isinstance(response.body["value"]["message"], basestring)
assert isinstance(response.body["value"]["stacktrace"], basestring)
def assert_success(response, value):
"""Verify that the provided wdclient.Response instance described a valid
error response as defined by `dfn-send-an-error` and the provided error
code.
:param response: wdclient.Response instance
:param value: expected value of the response body
"""
assert response.status == 200
assert response.body["value"] == value
| mpl-2.0 | 6,435,868,599,776,638,000 | 34.25974 | 79 | 0.676243 | false |
eduNEXT/edx-platform | openedx/core/djangoapps/schedules/migrations/0001_initial.py | 3 | 1385 | import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('student', '0001_squashed_0031_auto_20200317_1122'),
]
operations = [
migrations.CreateModel(
name='Schedule',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
('active', models.BooleanField(default=True, help_text='Indicates if this schedule is actively used')),
('start', models.DateTimeField(help_text='Date this schedule went into effect')),
('upgrade_deadline', models.DateTimeField(help_text='Deadline by which the learner must upgrade to a verified seat', null=True, blank=True)),
('enrollment', models.OneToOneField(to='student.CourseEnrollment', on_delete=models.CASCADE)),
],
options={
'verbose_name': 'Schedule',
'verbose_name_plural': 'Schedules',
},
),
]
| agpl-3.0 | -699,718,904,904,229,000 | 46.758621 | 157 | 0.628881 | false |
TheWardoctor/Wardoctors-repo | script.module.exodus/lib/resources/lib/sources/en/watchfree.py | 5 | 8377 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import proxy
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['watchfree.to','watchfree.unblockall.org']
self.base_link = 'http://watchfree.unblockall.org'
self.moviesearch_link = '/?keyword=%s&search_section=1'
self.tvsearch_link = '/?keyword=%s&search_section=2'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
title = 'watch' + cleantitle.get(title)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
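            # Drop duplicate result URLs while preserving their original order.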
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
query = self.tvsearch_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
query = urlparse.urljoin(self.base_link, query)
result = str(proxy.request(query, 'free movies'))
if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
tvshowtitle = 'watch' + cleantitle.get(tvshowtitle)
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
result = [i for i in result if any(x in i[1] for x in years)]
r = [(proxy.parse(i[0]), i[1]) for i in result]
match = [i[0] for i in r if tvshowtitle == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
match2 = [i[0] for i in r]
match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
if match2 == []: return
for i in match2[:5]:
try:
if len(match) > 0: url = match[0] ; break
r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
r = re.findall('(tt\d+)', r)
if imdb in r: url = i ; break
except:
pass
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'tv_episode_item')
result = client.parseDOM(result, 'div', attrs = {'class': 'tv_episode_item'})
title = cleantitle.get(title)
premiered = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(premiered)[0]
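            # Rebuild the air date as 'Month D YYYY' to match the site's episode listing.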
            months = ['January', 'February', 'March', 'April', 'May', 'June', 'July',
                      'August', 'September', 'October', 'November', 'December']
            premiered = '%s %01d %s' % (months[int(premiered[1]) - 1], int(premiered[2]), premiered[0])
result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'span', attrs = {'class': 'tv_episode_name'}), client.parseDOM(i, 'span', attrs = {'class': 'tv_num_versions'})) for i in result]
result = [(i[0], i[1][0], i[2]) for i in result if len(i[1]) > 0] + [(i[0], None, i[2]) for i in result if len(i[1]) == 0]
result = [(i[0], i[1], i[2][0]) for i in result if len(i[2]) > 0] + [(i[0], i[1], None) for i in result if len(i[2]) == 0]
result = [(i[0][0], i[1], i[2]) for i in result if len(i[0]) > 0]
url = [i for i in result if title == cleantitle.get(i[1]) and premiered == i[2]][:1]
if len(url) == 0: url = [i for i in result if premiered == i[2]]
if len(url) == 0 or len(url) > 1: url = [i for i in result if 'season-%01d-episode-%01d' % (int(season), int(episode)) in i[0]]
url = url[0][0]
url = proxy.parse(url)
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = proxy.request(url, 'link_ite')
links = client.parseDOM(result, 'table', attrs = {'class': 'link_ite.+?'})
for i in links:
try:
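                    # The page routes outbound links through a proxy and hides the
                    # real hoster URL base64-encoded in the 'gtfo' query parameter.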
url = client.parseDOM(i, 'a', ret='href')
url = [x for x in url if 'gtfo' in x][-1]
url = proxy.parse(url)
url = urlparse.parse_qs(urlparse.urlparse(url).query)['gtfo'][0]
url = base64.b64decode(url)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    if host not in hostDict: raise Exception()
host = host.encode('utf-8')
quality = client.parseDOM(i, 'div', attrs = {'class': 'quality'})
if any(x in ['[CAM]', '[TS]'] for x in quality): quality = 'CAM'
else: quality = 'SD'
quality = quality.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
| apache-2.0 | 2,834,163,736,857,230,000 | 42.180412 | 364 | 0.52489 | false |
dudepare/django | tests/m2m_regress/tests.py | 273 | 4695 | from __future__ import unicode_literals
from django.core.exceptions import FieldError
from django.test import TestCase
from django.utils import six
from .models import (
Entry, Line, Post, RegressionModelSplit, SelfRefer, SelfReferChild,
SelfReferChildSibling, Tag, TagCollection, Worksheet,
)
class M2MRegressionTests(TestCase):
def test_multiple_m2m(self):
# Multiple m2m references to model must be distinguished when
# accessing the relations through an instance attribute.
s1 = SelfRefer.objects.create(name='s1')
s2 = SelfRefer.objects.create(name='s2')
s3 = SelfRefer.objects.create(name='s3')
s1.references.add(s2)
s1.related.add(s3)
e1 = Entry.objects.create(name='e1')
t1 = Tag.objects.create(name='t1')
t2 = Tag.objects.create(name='t2')
e1.topics.add(t1)
e1.related.add(t2)
self.assertQuerysetEqual(s1.references.all(), ["<SelfRefer: s2>"])
self.assertQuerysetEqual(s1.related.all(), ["<SelfRefer: s3>"])
self.assertQuerysetEqual(e1.topics.all(), ["<Tag: t1>"])
self.assertQuerysetEqual(e1.related.all(), ["<Tag: t2>"])
def test_internal_related_name_not_in_error_msg(self):
# The secret internal related names for self-referential many-to-many
# fields shouldn't appear in the list when an error is made.
six.assertRaisesRegex(
self, FieldError,
"Choices are: id, name, references, related, selfreferchild, selfreferchildsibling$",
lambda: SelfRefer.objects.filter(porcupine='fred')
)
def test_m2m_inheritance_symmetry(self):
# Test to ensure that the relationship between two inherited models
# with a self-referential m2m field maintains symmetry
sr_child = SelfReferChild(name="Hanna")
sr_child.save()
sr_sibling = SelfReferChildSibling(name="Beth")
sr_sibling.save()
sr_child.related.add(sr_sibling)
self.assertQuerysetEqual(sr_child.related.all(), ["<SelfRefer: Beth>"])
self.assertQuerysetEqual(sr_sibling.related.all(), ["<SelfRefer: Hanna>"])
def test_m2m_pk_field_type(self):
# Regression for #11311 - The primary key for models in a m2m relation
# doesn't have to be an AutoField
w = Worksheet(id='abc')
w.save()
w.delete()
def test_add_m2m_with_base_class(self):
# Regression for #11956 -- You can add an object to a m2m with the
# base class without causing integrity errors
t1 = Tag.objects.create(name='t1')
t2 = Tag.objects.create(name='t2')
c1 = TagCollection.objects.create(name='c1')
c1.tags = [t1, t2]
c1 = TagCollection.objects.get(name='c1')
self.assertQuerysetEqual(c1.tags.all(), ["<Tag: t1>", "<Tag: t2>"], ordered=False)
self.assertQuerysetEqual(t1.tag_collections.all(), ["<TagCollection: c1>"])
def test_manager_class_caching(self):
e1 = Entry.objects.create()
e2 = Entry.objects.create()
t1 = Tag.objects.create()
t2 = Tag.objects.create()
# Get same manager twice in a row:
self.assertIs(t1.entry_set.__class__, t1.entry_set.__class__)
self.assertIs(e1.topics.__class__, e1.topics.__class__)
# Get same manager for different instances
self.assertIs(e1.topics.__class__, e2.topics.__class__)
self.assertIs(t1.entry_set.__class__, t2.entry_set.__class__)
def test_m2m_abstract_split(self):
# Regression for #19236 - an abstract class with a 'split' method
# causes a TypeError in add_lazy_relation
m1 = RegressionModelSplit(name='1')
m1.save()
def test_assigning_invalid_data_to_m2m_doesnt_clear_existing_relations(self):
t1 = Tag.objects.create(name='t1')
t2 = Tag.objects.create(name='t2')
c1 = TagCollection.objects.create(name='c1')
c1.tags = [t1, t2]
with self.assertRaises(TypeError):
c1.tags = 7
c1.refresh_from_db()
self.assertQuerysetEqual(c1.tags.order_by('name'), ["<Tag: t1>", "<Tag: t2>"])
def test_multiple_forwards_only_m2m(self):
# Regression for #24505 - Multiple ManyToManyFields to same "to"
# model with related_name set to '+'.
foo = Line.objects.create(name='foo')
bar = Line.objects.create(name='bar')
post = Post.objects.create()
post.primary_lines.add(foo)
post.secondary_lines.add(bar)
self.assertQuerysetEqual(post.primary_lines.all(), ['<Line: foo>'])
self.assertQuerysetEqual(post.secondary_lines.all(), ['<Line: bar>'])
| bsd-3-clause | 2,107,127,109,326,109,700 | 36.862903 | 97 | 0.63344 | false |
jordanemedlock/psychtruths | temboo/Library/Google/Plus/Domains/Media/Insert.py | 5 | 6021 | # -*- coding: utf-8 -*-
###############################################################################
#
# Insert
# Adds a new media item to an album.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class Insert(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the Insert Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(Insert, self).__init__(temboo_session, '/Library/Google/Plus/Domains/Media/Insert')
def new_input_set(self):
return InsertInputSet()
def _make_result_set(self, result, path):
return InsertResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return InsertChoreographyExecution(session, exec_id, path)
class InsertInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the Insert
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth2 process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(InsertInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(InsertInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(InsertInputSet, self)._set_input('ClientSecret', value)
def set_Collection(self, value):
"""
Set the value of the Collection input for this Choreo. ((optional, string) Currently the acceptable values are "cloud". (Upload the media to share on Google+).)
"""
super(InsertInputSet, self)._set_input('Collection', value)
def set_ContentType(self, value):
"""
Set the value of the ContentType input for this Choreo. ((conditional, string) The Content-Type of the file that is being uploaded (i.e. image/jpg). Required when specifying the FileContent input.)
"""
super(InsertInputSet, self)._set_input('ContentType', value)
def set_DisplayName(self, value):
"""
Set the value of the DisplayName input for this Choreo. ((optional, string) The display name for the media. If this parameter is not provided, Google assigns a GUID to the media resource.)
"""
super(InsertInputSet, self)._set_input('DisplayName', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Selector specifying a subset of fields to include in the response.)
"""
super(InsertInputSet, self)._set_input('Fields', value)
def set_FileContent(self, value):
"""
Set the value of the FileContent input for this Choreo. ((conditional, string) The Base64 encoded contents of the file to upload.)
"""
super(InsertInputSet, self)._set_input('FileContent', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(InsertInputSet, self)._set_input('RefreshToken', value)
def set_UserID(self, value):
"""
Set the value of the UserID input for this Choreo. ((optional, string) The ID of the user to create the activity on behalf of. The value "me" is set as the default to indicate the authenticated user.)
"""
super(InsertInputSet, self)._set_input('UserID', value)
class InsertResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the Insert Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class InsertChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return InsertResultSet(response, path)
| apache-2.0 | 7,014,719,070,635,491,000 | 44.270677 | 255 | 0.670487 | false |
kris-singh/pgmpy | pgmpy/base/DirectedGraph.py | 3 | 9379 | #!/usr/bin/env python3
import itertools
import networkx as nx
from pgmpy.base import UndirectedGraph
class DirectedGraph(nx.DiGraph):
"""
Base class for all Directed Graphical Models.
Each node in the graph can represent either a random variable, `Factor`,
or a cluster of random variables. Edges in the graph represent the
dependencies between these.
Parameters
----------
data: input graph
Data to initialize graph. If data=None (default) an empty graph is
created. The data can be an edge list or any Networkx graph object.
Examples
--------
Create an empty DirectedGraph with no nodes and no edges
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph()
G can be grown in several ways:
**Nodes:**
Add one node at a time:
>>> G.add_node(node='a')
Add the nodes from any container (a list, set or tuple or the nodes
from another graph).
>>> G.add_nodes_from(nodes=['a', 'b'])
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> G.add_edge(u='a', v='b')
a list of edges,
>>> G.add_edges_from(ebunch=[('a', 'b'), ('b', 'c')])
If some edges connect nodes not yet in the model, the nodes
are added automatically. There are no errors when adding
nodes or edges that already exist.
**Shortcuts:**
    Many common graph features allow python syntax to speed reporting.
>>> 'a' in G # check if node in graph
True
>>> len(G) # number of nodes in graph
3
"""
def __init__(self, ebunch=None):
super(DirectedGraph, self).__init__(ebunch)
def add_node(self, node, weight=None):
"""
Adds a single node to the Graph.
Parameters
----------
node: str, int, or any hashable python object.
The node to add to the graph.
weight: int, float
The weight of the node.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph()
>>> G.add_node(node='A')
>>> G.nodes()
['A']
Adding a node with some weight.
>>> G.add_node(node='B', weight=0.3)
The weight of these nodes can be accessed as:
>>> G.node['B']
{'weight': 0.3}
>>> G.node['A']
{'weight': None}
"""
super(DirectedGraph, self).add_node(node, weight=weight)
def add_nodes_from(self, nodes, weights=None):
"""
Add multiple nodes to the Graph.
        **The behaviour of adding weights is different than in networkx.
Parameters
----------
nodes: iterable container
A container of nodes (list, dict, set, or any hashable python
object).
weights: list, tuple (default=None)
A container of weights (int, float). The weight value at index i
is associated with the variable at index i.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph()
>>> G.add_nodes_from(nodes=['A', 'B', 'C'])
>>> G.nodes()
['A', 'B', 'C']
Adding nodes with weights:
>>> G.add_nodes_from(nodes=['D', 'E'], weights=[0.3, 0.6])
>>> G.node['D']
{'weight': 0.3}
>>> G.node['E']
{'weight': 0.6}
>>> G.node['A']
{'weight': None}
"""
nodes = list(nodes)
if weights:
if len(nodes) != len(weights):
raise ValueError("The number of elements in nodes and weights"
"should be equal.")
for index in range(len(nodes)):
self.add_node(node=nodes[index], weight=weights[index])
else:
for node in nodes:
self.add_node(node=node)
def add_edge(self, u, v, weight=None):
"""
Add an edge between u and v.
The nodes u and v will be automatically added if they are
not already in the graph.
Parameters
----------
u, v : nodes
Nodes can be any hashable Python object.
weight: int, float (default=None)
The weight of the edge
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph()
>>> G.add_nodes_from(nodes=['Alice', 'Bob', 'Charles'])
>>> G.add_edge(u='Alice', v='Bob')
>>> G.nodes()
['Alice', 'Bob', 'Charles']
>>> G.edges()
[('Alice', 'Bob')]
When the node is not already present in the graph:
>>> G.add_edge(u='Alice', v='Ankur')
>>> G.nodes()
['Alice', 'Ankur', 'Bob', 'Charles']
>>> G.edges()
[('Alice', 'Bob'), ('Alice', 'Ankur')]
Adding edges with weight:
>>> G.add_edge('Ankur', 'Maria', weight=0.1)
>>> G.edge['Ankur']['Maria']
{'weight': 0.1}
"""
super(DirectedGraph, self).add_edge(u, v, weight=weight)
def add_edges_from(self, ebunch, weights=None):
"""
Add all the edges in ebunch.
If nodes referred in the ebunch are not already present, they
will be automatically added. Node names can be any hashable python
object.
        **The behaviour of adding weights is different than in networkx.
Parameters
----------
ebunch : container of edges
Each edge given in the container will be added to the graph.
The edges must be given as 2-tuples (u, v).
weights: list, tuple (default=None)
A container of weights (int, float). The weight value at index i
is associated with the edge at index i.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph()
>>> G.add_nodes_from(nodes=['Alice', 'Bob', 'Charles'])
>>> G.add_edges_from(ebunch=[('Alice', 'Bob'), ('Bob', 'Charles')])
>>> G.nodes()
['Alice', 'Bob', 'Charles']
>>> G.edges()
[('Alice', 'Bob'), ('Bob', 'Charles')]
When the node is not already in the model:
>>> G.add_edges_from(ebunch=[('Alice', 'Ankur')])
>>> G.nodes()
['Alice', 'Bob', 'Charles', 'Ankur']
>>> G.edges()
[('Alice', 'Bob'), ('Bob', 'Charles'), ('Alice', 'Ankur')]
Adding edges with weights:
>>> G.add_edges_from([('Ankur', 'Maria'), ('Maria', 'Mason')],
... weights=[0.3, 0.5])
>>> G.edge['Ankur']['Maria']
{'weight': 0.3}
>>> G.edge['Maria']['Mason']
{'weight': 0.5}
"""
ebunch = list(ebunch)
if weights:
if len(ebunch) != len(weights):
raise ValueError("The number of elements in ebunch and weights"
"should be equal")
for index in range(len(ebunch)):
self.add_edge(ebunch[index][0], ebunch[index][1],
weight=weights[index])
else:
for edge in ebunch:
self.add_edge(edge[0], edge[1])
def get_parents(self, node):
"""
Returns a list of parents of node.
Throws an error if the node is not present in the graph.
Parameters
----------
node: string, int or any hashable python object.
The node whose parents would be returned.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph(ebunch=[('diff', 'grade'), ('intel', 'grade')])
        >>> G.get_parents(node='grade')
['diff', 'intel']
"""
return self.predecessors(node)
def moralize(self):
"""
Removes all the immoralities in the DirectedGraph and creates a moral
graph (UndirectedGraph).
A v-structure X->Z<-Y is an immorality if there is no directed edge
between X and Y.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> G = DirectedGraph(ebunch=[('diff', 'grade'), ('intel', 'grade')])
>>> moral_graph = G.moralize()
>>> moral_graph.edges()
[('intel', 'grade'), ('intel', 'diff'), ('grade', 'diff')]
"""
moral_graph = UndirectedGraph(self.to_undirected().edges())
for node in self.nodes():
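            # 'Marry' the parents of each node by connecting them pairwise.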
moral_graph.add_edges_from(
itertools.combinations(self.get_parents(node), 2))
return moral_graph
def get_leaves(self):
"""
Returns a list of leaves of the graph.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> graph = DirectedGraph([('A', 'B'), ('B', 'C'), ('B', 'D')])
>>> graph.get_leaves()
['C', 'D']
"""
return [node for node, out_degree in self.out_degree_iter() if
out_degree == 0]
def get_roots(self):
"""
Returns a list of roots of the graph.
Examples
--------
>>> from pgmpy.base import DirectedGraph
>>> graph = DirectedGraph([('A', 'B'), ('B', 'C'), ('B', 'D'), ('E', 'B')])
>>> graph.get_roots()
['A', 'E']
"""
return [node for node, in_degree in self.in_degree().items() if in_degree == 0]
| mit | 5,979,829,303,730,616,000 | 28.586751 | 87 | 0.518925 | false |
MinFu/youtube-dl | youtube_dl/extractor/hypem.py | 128 | 2101 | from __future__ import unicode_literals
import json
import time
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
)
class HypemIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?hypem\.com/track/(?P<id>[^/]+)/'
_TEST = {
'url': 'http://hypem.com/track/1v6ga/BODYWORK+-+TAME',
'md5': 'b9cc91b5af8995e9f0c1cee04c575828',
'info_dict': {
'id': '1v6ga',
'ext': 'mp3',
'title': 'Tame',
'uploader': 'BODYWORK',
}
}
def _real_extract(self, url):
track_id = self._match_id(url)
data = {'ax': 1, 'ts': time.time()}
data_encoded = compat_urllib_parse.urlencode(data)
complete_url = url + "?" + data_encoded
request = compat_urllib_request.Request(complete_url)
response, urlh = self._download_webpage_handle(
request, track_id, 'Downloading webpage with the url')
cookie = urlh.headers.get('Set-Cookie', '')
html_tracks = self._html_search_regex(
r'(?ms)<script type="application/json" id="displayList-data">\s*(.*?)\s*</script>',
response, 'tracks')
try:
track_list = json.loads(html_tracks)
track = track_list['tracks'][0]
except ValueError:
raise ExtractorError('Hypemachine contained invalid JSON.')
key = track['key']
track_id = track['id']
artist = track['artist']
title = track['song']
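        # The serve endpoint requires the page cookie plus the track id and key.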
serve_url = "http://hypem.com/serve/source/%s/%s" % (track_id, key)
request = compat_urllib_request.Request(
serve_url, '', {'Content-Type': 'application/json'})
request.add_header('cookie', cookie)
song_data = self._download_json(request, track_id, 'Downloading metadata')
final_url = song_data["url"]
return {
'id': track_id,
'url': final_url,
'ext': 'mp3',
'title': title,
'uploader': artist,
}
| unlicense | -6,413,875,766,102,206,000 | 30.358209 | 95 | 0.55069 | false |
hyz1011088/StarCluster | utils/s3mount.py | 21 | 1151 | #!/usr/bin/env python
import os
import sys
from starcluster.config import StarClusterConfig
print 'Simple wrapper script for s3fs (http://s3fs.googlecode.com/)'
cfg = StarClusterConfig().load()
ec2 = cfg.get_easy_ec2()
buckets = ec2.s3.get_buckets()
counter = 0
for bucket in buckets:
print "[%d] %s" % (counter,bucket.name)
counter += 1
try:
    inp = int(raw_input('>>> Enter the bucket to mount: '))
selection = buckets[inp].name
print 'you selected: %s' % selection
    mountpt = raw_input('>>> Please enter the mount point: ')
print 'mounting %s at: %s' % (selection,mountpt)
except KeyboardInterrupt,e:
print
print 'Exiting...'
sys.exit(1)
try:
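    # Pass the AWS credentials from the StarCluster config to s3fs as mount options.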
os.system('s3fs %s -o accessKeyId=%s -o secretAccessKey=%s %s' % (selection,
cfg.aws.get('aws_access_key_id'),
cfg.aws.get('aws_secret_access_key'),mountpt))
except KeyboardInterrupt,e:
print
print 'Attempting to umount %s' % mountpt
os.system('sudo umount %s' % mountpt)
print 'Exiting...'
sys.exit(1)
| gpl-3.0 | 4,432,722,771,027,389,400 | 30.108108 | 116 | 0.582971 | false |
MechanisM/musicdb | contrib/django/core/serializers/base.py | 2 | 5366 | """
Module for abstract serializer/unserializer base classes.
"""
from StringIO import StringIO
from django.db import models
from django.utils.encoding import smart_unicode
class SerializerDoesNotExist(KeyError):
"""The requested serializer was not found."""
pass
class SerializationError(Exception):
"""Something bad happened during serialization."""
pass
class DeserializationError(Exception):
"""Something bad happened during deserialization."""
pass
class Serializer(object):
"""
Abstract serializer base class.
"""
# Indicates if the implemented serializer is only available for
# internal Django use.
internal_use_only = False
def serialize(self, queryset, **options):
"""
Serialize a queryset.
"""
self.options = options
self.stream = options.pop("stream", StringIO())
self.selected_fields = options.pop("fields", None)
self.use_natural_keys = options.pop("use_natural_keys", False)
self.start_serialization()
for obj in queryset:
self.start_object(obj)
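            # Plain, foreign-key and many-to-many fields are dispatched to
            # separate handlers so serializer subclasses control each representation.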
for field in obj._meta.local_fields:
if field.serialize:
if field.rel is None:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_field(obj, field)
else:
if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
self.handle_fk_field(obj, field)
for field in obj._meta.many_to_many:
if field.serialize:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_m2m_field(obj, field)
self.end_object(obj)
self.end_serialization()
return self.getvalue()
def start_serialization(self):
"""
Called when serializing of the queryset starts.
"""
raise NotImplementedError
def end_serialization(self):
"""
Called when serializing of the queryset ends.
"""
pass
def start_object(self, obj):
"""
Called when serializing of an object starts.
"""
raise NotImplementedError
def end_object(self, obj):
"""
Called when serializing of an object ends.
"""
pass
def handle_field(self, obj, field):
"""
Called to handle each individual (non-relational) field on an object.
"""
raise NotImplementedError
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey field.
"""
raise NotImplementedError
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField.
"""
raise NotImplementedError
def getvalue(self):
"""
Return the fully serialized queryset (or None if the output stream is
not seekable).
"""
if callable(getattr(self.stream, 'getvalue', None)):
return self.stream.getvalue()
class Deserializer(object):
"""
Abstract base deserializer class.
"""
def __init__(self, stream_or_string, **options):
"""
Init this serializer given a stream or a string
"""
self.options = options
if isinstance(stream_or_string, basestring):
self.stream = StringIO(stream_or_string)
else:
self.stream = stream_or_string
# hack to make sure that the models have all been loaded before
# deserialization starts (otherwise subclass calls to get_model()
# and friends might fail...)
models.get_apps()
def __iter__(self):
return self
def next(self):
"""Iteration iterface -- return the next item in the stream"""
raise NotImplementedError
class DeserializedObject(object):
"""
A deserialized model.
Basically a container for holding the pre-saved deserialized data along
with the many-to-many data saved with the object.
Call ``save()`` to save the object (with the many-to-many data) to the
database; call ``save(save_m2m=False)`` to save just the object fields
(and not touch the many-to-many stuff.)
"""
def __init__(self, obj, m2m_data=None):
self.object = obj
self.m2m_data = m2m_data
def __repr__(self):
return "<DeserializedObject: %s.%s(pk=%s)>" % (
self.object._meta.app_label, self.object._meta.object_name, self.object.pk)
def save(self, save_m2m=True, using=None):
# Call save on the Model baseclass directly. This bypasses any
# model-defined save. The save is also forced to be raw.
# This ensures that the data that is deserialized is literally
# what came from the file, not post-processed by pre_save/save
# methods.
models.Model.save_base(self.object, using=using, raw=True)
if self.m2m_data and save_m2m:
for accessor_name, object_list in self.m2m_data.items():
setattr(self.object, accessor_name, object_list)
# prevent a second (possibly accidental) call to save() from saving
# the m2m data twice.
self.m2m_data = None
| agpl-3.0 | -8,773,941,918,880,716,000 | 30.751479 | 102 | 0.603988 | false |
RevelSystems/django | django/conf/locale/hu/formats.py | 504 | 1117 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y. F j.'
TIME_FORMAT = 'G.i'
DATETIME_FORMAT = 'Y. F j. G.i'
YEAR_MONTH_FORMAT = 'Y. F'
MONTH_DAY_FORMAT = 'F j.'
SHORT_DATE_FORMAT = 'Y.m.d.'
SHORT_DATETIME_FORMAT = 'Y.m.d. G.i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y.%m.%d.', # '2006.10.25.'
]
TIME_INPUT_FORMATS = [
'%H.%M.%S', # '14.30.59'
'%H.%M', # '14.30'
]
DATETIME_INPUT_FORMATS = [
'%Y.%m.%d. %H.%M.%S', # '2006.10.25. 14.30.59'
'%Y.%m.%d. %H.%M.%S.%f', # '2006.10.25. 14.30.59.000200'
'%Y.%m.%d. %H.%M', # '2006.10.25. 14.30'
'%Y.%m.%d.', # '2006.10.25.'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' ' # Non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause | 3,054,225,110,225,006,000 | 31.823529 | 77 | 0.604839 | false |
emrecamasuvi/appengineTmp | lib/flask/helpers.py | 776 | 33793 | # -*- coding: utf-8 -*-
"""
flask.helpers
~~~~~~~~~~~~~
Implements various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pkgutil
import posixpath
import mimetypes
from time import time
from zlib import adler32
from threading import RLock
from werkzeug.routing import BuildError
from functools import update_wrapper
try:
from werkzeug.urls import url_quote
except ImportError:
from urlparse import quote as url_quote
from werkzeug.datastructures import Headers
from werkzeug.exceptions import NotFound
# this was moved in 0.7
try:
from werkzeug.wsgi import wrap_file
except ImportError:
from werkzeug.utils import wrap_file
from jinja2 import FileSystemLoader
from .signals import message_flashed
from .globals import session, _request_ctx_stack, _app_ctx_stack, \
current_app, request
from ._compat import string_types, text_type
# sentinel
_missing = object()
# what separators does this operating system provide that are not a slash?
# this is used by the send_from_directory function to ensure that nobody is
# able to access files from outside the filesystem.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
if sep not in (None, '/'))
def _endpoint_from_view_func(view_func):
"""Internal helper that returns the default endpoint for a given
function. This always is the function name.
"""
assert view_func is not None, 'expected view func if endpoint ' \
'is not provided.'
return view_func.__name__
def stream_with_context(generator_or_function):
"""Request contexts disappear when the response is started on the server.
This is done for efficiency reasons and to make it less likely to encounter
memory leaks with badly written WSGI middlewares. The downside is that if
you are using streamed responses, the generator cannot access request bound
information any more.
This function however can help you keep the context around for longer::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
@stream_with_context
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(generate())
Alternatively it can also be used around a specific generator::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(stream_with_context(generate()))
.. versionadded:: 0.9
"""
try:
gen = iter(generator_or_function)
except TypeError:
def decorator(*args, **kwargs):
gen = generator_or_function()
return stream_with_context(gen)
return update_wrapper(decorator, generator_or_function)
def generator():
ctx = _request_ctx_stack.top
if ctx is None:
raise RuntimeError('Attempted to stream with context but '
'there was no context in the first place to keep around.')
with ctx:
# Dummy sentinel. Has to be inside the context block or we're
# not actually keeping the context around.
yield None
# The try/finally is here so that if someone passes a WSGI level
# iterator in we're still running the cleanup logic. Generators
# don't need that because they are closed on their destruction
# automatically.
try:
for item in gen:
yield item
finally:
if hasattr(gen, 'close'):
gen.close()
# The trick is to start the generator. Then the code execution runs until
# the first dummy None is yielded at which point the context was already
# pushed. This item is discarded. Then when the iteration continues the
# real generator is executed.
wrapped_g = generator()
next(wrapped_g)
return wrapped_g
def make_response(*args):
"""Sometimes it is necessary to set additional headers in a view. Because
views do not have to return response objects but can return a value that
is converted into a response object by Flask itself, it becomes tricky to
add headers to it. This function can be called instead of using a return
and you will get a response object which you can use to attach headers.
If view looked like this and you want to add a new header::
def index():
return render_template('index.html', foo=42)
You can now do something like this::
def index():
response = make_response(render_template('index.html', foo=42))
response.headers['X-Parachutes'] = 'parachutes are cool'
return response
This function accepts the very same arguments you can return from a
view function. This for example creates a response with a 404 error
code::
response = make_response(render_template('not_found.html'), 404)
The other use case of this function is to force the return value of a
view function into a response which is helpful with view
decorators::
response = make_response(view_function())
response.headers['X-Parachutes'] = 'parachutes are cool'
Internally this function does the following things:
- if no arguments are passed, it creates a new response argument
- if one argument is passed, :meth:`flask.Flask.make_response`
is invoked with it.
- if more than one argument is passed, the arguments are passed
to the :meth:`flask.Flask.make_response` function as tuple.
.. versionadded:: 0.6
"""
if not args:
return current_app.response_class()
if len(args) == 1:
args = args[0]
return current_app.make_response(args)
def url_for(endpoint, **values):
"""Generates a URL to the given endpoint with the method provided.
Variable arguments that are unknown to the target endpoint are appended
to the generated URL as query arguments. If the value of a query argument
is `None`, the whole pair is skipped. In case blueprints are active
you can shortcut references to the same blueprint by prefixing the
local endpoint with a dot (``.``).
This will reference the index function local to the current blueprint::
url_for('.index')
For more information, head over to the :ref:`Quickstart <url-building>`.
To integrate applications, :class:`Flask` has a hook to intercept URL build
errors through :attr:`Flask.build_error_handler`. The `url_for` function
results in a :exc:`~werkzeug.routing.BuildError` when the current app does
not have a URL for the given endpoint and values. When it does, the
:data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if
it is not `None`, which can return a string to use as the result of
`url_for` (instead of `url_for`'s default to raise the
:exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
An example::
def external_url_handler(error, endpoint, **values):
"Looks up an external URL when `url_for` cannot build a URL."
# This is an example of hooking the build_error_handler.
# Here, lookup_url is some utility function you've built
# which looks up the endpoint in some external URL registry.
url = lookup_url(endpoint, **values)
if url is None:
# External lookup did not have a URL.
# Re-raise the BuildError, in context of original traceback.
exc_type, exc_value, tb = sys.exc_info()
if exc_value is error:
raise exc_type, exc_value, tb
else:
raise error
# url_for will use this result, instead of raising BuildError.
return url
app.build_error_handler = external_url_handler
Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
`endpoint` and `**values` are the arguments passed into `url_for`. Note
that this is for building URLs outside the current application, and not for
handling 404 NotFound errors.
.. versionadded:: 0.10
The `_scheme` parameter was added.
.. versionadded:: 0.9
The `_anchor` and `_method` parameters were added.
.. versionadded:: 0.9
Calls :meth:`Flask.handle_build_error` on
:exc:`~werkzeug.routing.BuildError`.
:param endpoint: the endpoint of the URL (name of the function)
:param values: the variable arguments of the URL rule
:param _external: if set to `True`, an absolute URL is generated. Server
address can be changed via `SERVER_NAME` configuration variable which
defaults to `localhost`.
:param _scheme: a string specifying the desired URL scheme. The `_external`
parameter must be set to `True` or a `ValueError` is raised.
:param _anchor: if provided this is added as anchor to the URL.
:param _method: if provided this explicitly specifies an HTTP method.
"""
appctx = _app_ctx_stack.top
reqctx = _request_ctx_stack.top
if appctx is None:
raise RuntimeError('Attempted to generate a URL without the '
'application context being pushed. This has to be '
'executed when application context is available.')
# If request specific information is available we have some extra
# features that support "relative" urls.
if reqctx is not None:
url_adapter = reqctx.url_adapter
blueprint_name = request.blueprint
if not reqctx.request._is_old_module:
if endpoint[:1] == '.':
if blueprint_name is not None:
endpoint = blueprint_name + endpoint
else:
endpoint = endpoint[1:]
else:
# TODO: get rid of this deprecated functionality in 1.0
if '.' not in endpoint:
if blueprint_name is not None:
endpoint = blueprint_name + '.' + endpoint
elif endpoint.startswith('.'):
endpoint = endpoint[1:]
external = values.pop('_external', False)
# Otherwise go with the url adapter from the appctx and make
# the urls external by default.
else:
url_adapter = appctx.url_adapter
if url_adapter is None:
raise RuntimeError('Application was not able to create a URL '
'adapter for request independent URL generation. '
'You might be able to fix this by setting '
'the SERVER_NAME config variable.')
external = values.pop('_external', True)
anchor = values.pop('_anchor', None)
method = values.pop('_method', None)
scheme = values.pop('_scheme', None)
appctx.app.inject_url_defaults(endpoint, values)
if scheme is not None:
if not external:
raise ValueError('When specifying _scheme, _external must be True')
url_adapter.url_scheme = scheme
try:
rv = url_adapter.build(endpoint, values, method=method,
force_external=external)
except BuildError as error:
# We need to inject the values again so that the app callback can
# deal with that sort of stuff.
values['_external'] = external
values['_anchor'] = anchor
values['_method'] = method
return appctx.app.handle_url_build_error(error, endpoint, values)
if anchor is not None:
rv += '#' + url_quote(anchor)
return rv
def get_template_attribute(template_name, attribute):
"""Loads a macro (or variable) a template exports. This can be used to
invoke a macro from within Python code. If you for example have a
template named `_cider.html` with the following contents:
.. sourcecode:: html+jinja
{% macro hello(name) %}Hello {{ name }}!{% endmacro %}
You can access this from Python code like this::
hello = get_template_attribute('_cider.html', 'hello')
return hello('World')
.. versionadded:: 0.2
:param template_name: the name of the template
:param attribute: the name of the variable of macro to access
"""
return getattr(current_app.jinja_env.get_template(template_name).module,
attribute)
def flash(message, category='message'):
"""Flashes a message to the next request. In order to remove the
flashed message from the session and to display it to the user,
the template has to call :func:`get_flashed_messages`.
.. versionchanged:: 0.3
`category` parameter added.
:param message: the message to be flashed.
:param category: the category for the message. The following values
are recommended: ``'message'`` for any kind of message,
``'error'`` for errors, ``'info'`` for information
messages and ``'warning'`` for warnings. However any
kind of string can be used as category.
"""
# Original implementation:
#
    #     session.setdefault('_flashes', []).append((category, message))
#
# This assumed that changes made to mutable structures in the session are
    # always in sync with the session object, which is not true for session
# implementations that use external storage for keeping their keys/values.
flashes = session.get('_flashes', [])
flashes.append((category, message))
session['_flashes'] = flashes
message_flashed.send(current_app._get_current_object(),
message=message, category=category)
def get_flashed_messages(with_categories=False, category_filter=[]):
"""Pulls all flashed messages from the session and returns them.
Further calls in the same request to the function will return
the same messages. By default just the messages are returned,
but when `with_categories` is set to `True`, the return value will
be a list of tuples in the form ``(category, message)`` instead.
Filter the flashed messages to one or more categories by providing those
categories in `category_filter`. This allows rendering categories in
separate html blocks. The `with_categories` and `category_filter`
arguments are distinct:
* `with_categories` controls whether categories are returned with message
text (`True` gives a tuple, where `False` gives just the message text).
* `category_filter` filters the messages down to only those matching the
provided categories.
See :ref:`message-flashing-pattern` for examples.
.. versionchanged:: 0.3
`with_categories` parameter added.
.. versionchanged:: 0.9
`category_filter` parameter added.
:param with_categories: set to `True` to also receive categories.
:param category_filter: whitelist of categories to limit return values
"""
flashes = _request_ctx_stack.top.flashes
if flashes is None:
_request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \
if '_flashes' in session else []
if category_filter:
flashes = list(filter(lambda f: f[0] in category_filter, flashes))
if not with_categories:
return [x[1] for x in flashes]
return flashes
def send_file(filename_or_fp, mimetype=None, as_attachment=False,
attachment_filename=None, add_etags=True,
cache_timeout=None, conditional=False):
"""Sends the contents of a file to the client. This will use the
most efficient method available and configured. By default it will
try to use the WSGI server's file_wrapper support. Alternatively
you can set the application's :attr:`~Flask.use_x_sendfile` attribute
to ``True`` to directly emit an `X-Sendfile` header. This however
requires support of the underlying webserver for `X-Sendfile`.
By default it will try to guess the mimetype for you, but you can
also explicitly provide one. For extra security you probably want
to send certain files as attachment (HTML for instance). The mimetype
guessing requires a `filename` or an `attachment_filename` to be
provided.
Please never pass filenames to this function from user sources without
checking them first. Something like this is usually sufficient to
avoid security problems::
if '..' in filename or filename.startswith('/'):
abort(404)
.. versionadded:: 0.2
.. versionadded:: 0.5
The `add_etags`, `cache_timeout` and `conditional` parameters were
added. The default behavior is now to attach etags.
.. versionchanged:: 0.7
mimetype guessing and etag support for file objects was
deprecated because it was unreliable. Pass a filename if you are
able to, otherwise attach an etag yourself. This functionality
will be removed in Flask 1.0
.. versionchanged:: 0.9
cache_timeout pulls its default from application config, when None.
:param filename_or_fp: the filename of the file to send. This is
relative to the :attr:`~Flask.root_path` if a
relative path is specified.
Alternatively a file object might be provided
in which case `X-Sendfile` might not work and
fall back to the traditional method. Make sure
that the file pointer is positioned at the start
of data to send before calling :func:`send_file`.
:param mimetype: the mimetype of the file if provided, otherwise
auto detection happens.
:param as_attachment: set to `True` if you want to send this file with
a ``Content-Disposition: attachment`` header.
:param attachment_filename: the filename for the attachment if it
differs from the file's filename.
:param add_etags: set to `False` to disable attaching of etags.
:param conditional: set to `True` to enable conditional responses.
:param cache_timeout: the timeout in seconds for the headers. When `None`
(default), this value is set by
:meth:`~Flask.get_send_file_max_age` of
:data:`~flask.current_app`.
"""
mtime = None
if isinstance(filename_or_fp, string_types):
filename = filename_or_fp
file = None
else:
from warnings import warn
file = filename_or_fp
filename = getattr(file, 'name', None)
# XXX: this behavior is now deprecated because it was unreliable.
# removed in Flask 1.0
if not attachment_filename and not mimetype \
and isinstance(filename, string_types):
warn(DeprecationWarning('The filename support for file objects '
'passed to send_file is now deprecated. Pass an '
'attach_filename if you want mimetypes to be guessed.'),
stacklevel=2)
if add_etags:
warn(DeprecationWarning('In future flask releases etags will no '
'longer be generated for file objects passed to the send_file '
'function because this behavior was unreliable. Pass '
'filenames instead if possible, otherwise attach an etag '
'yourself based on another value'), stacklevel=2)
if filename is not None:
if not os.path.isabs(filename):
filename = os.path.join(current_app.root_path, filename)
if mimetype is None and (filename or attachment_filename):
mimetype = mimetypes.guess_type(filename or attachment_filename)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
headers = Headers()
if as_attachment:
if attachment_filename is None:
if filename is None:
raise TypeError('filename unavailable, required for '
'sending as attachment')
attachment_filename = os.path.basename(filename)
headers.add('Content-Disposition', 'attachment',
filename=attachment_filename)
if current_app.use_x_sendfile and filename:
if file is not None:
file.close()
headers['X-Sendfile'] = filename
headers['Content-Length'] = os.path.getsize(filename)
data = None
else:
if file is None:
file = open(filename, 'rb')
mtime = os.path.getmtime(filename)
headers['Content-Length'] = os.path.getsize(filename)
data = wrap_file(request.environ, file)
rv = current_app.response_class(data, mimetype=mimetype, headers=headers,
direct_passthrough=True)
# if we know the file modification date, we can store it as the
# the time of the last modification.
if mtime is not None:
rv.last_modified = int(mtime)
rv.cache_control.public = True
if cache_timeout is None:
cache_timeout = current_app.get_send_file_max_age(filename)
if cache_timeout is not None:
rv.cache_control.max_age = cache_timeout
rv.expires = int(time() + cache_timeout)
if add_etags and filename is not None:
rv.set_etag('flask-%s-%s-%s' % (
os.path.getmtime(filename),
os.path.getsize(filename),
adler32(
filename.encode('utf-8') if isinstance(filename, text_type)
else filename
) & 0xffffffff
))
if conditional:
rv = rv.make_conditional(request)
# make sure we don't send x-sendfile for servers that
# ignore the 304 status code for x-sendfile.
if rv.status_code == 304:
rv.headers.pop('x-sendfile', None)
return rv
def safe_join(directory, filename):
"""Safely join `directory` and `filename`.
Example usage::
@app.route('/wiki/<path:filename>')
def wiki_page(filename):
filename = safe_join(app.config['WIKI_FOLDER'], filename)
with open(filename, 'rb') as fd:
content = fd.read() # Read and process the file content...
:param directory: the base directory.
:param filename: the untrusted filename relative to that directory.
:raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path
would fall out of `directory`.
"""
filename = posixpath.normpath(filename)
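    # Reject any alternative path separators the OS understands (e.g. '\\' on Windows).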
for sep in _os_alt_seps:
if sep in filename:
raise NotFound()
if os.path.isabs(filename) or \
filename == '..' or \
filename.startswith('../'):
raise NotFound()
return os.path.join(directory, filename)
def send_from_directory(directory, filename, **options):
"""Send a file from a given directory with :func:`send_file`. This
is a secure way to quickly expose static files from an upload folder
or something similar.
Example usage::
@app.route('/uploads/<path:filename>')
def download_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'],
filename, as_attachment=True)
.. admonition:: Sending files and Performance
It is strongly recommended to activate either `X-Sendfile` support in
your webserver or (if no authentication happens) to tell the webserver
to serve files for the given path on its own without calling into the
web application for improved performance.
.. versionadded:: 0.5
:param directory: the directory where all the files are stored.
:param filename: the filename relative to that directory to
download.
:param options: optional keyword arguments that are directly
forwarded to :func:`send_file`.
"""
filename = safe_join(directory, filename)
if not os.path.isfile(filename):
raise NotFound()
options.setdefault('conditional', True)
return send_file(filename, **options)
def get_root_path(import_name):
"""Returns the path to a package or cwd if that cannot be found. This
returns the path of a package or the folder that contains a module.
Not to be confused with the package path returned by :func:`find_package`.
"""
# Module already imported and has a file attribute. Use that first.
mod = sys.modules.get(import_name)
if mod is not None and hasattr(mod, '__file__'):
return os.path.dirname(os.path.abspath(mod.__file__))
# Next attempt: check the loader.
loader = pkgutil.get_loader(import_name)
# Loader does not exist or we're referring to an unloaded main module
# or a main module without path (interactive sessions), go with the
# current working directory.
if loader is None or import_name == '__main__':
return os.getcwd()
# For .egg, zipimporter does not have get_filename until Python 2.7.
# Some other loaders might exhibit the same behavior.
if hasattr(loader, 'get_filename'):
filepath = loader.get_filename(import_name)
else:
# Fall back to imports.
__import__(import_name)
filepath = sys.modules[import_name].__file__
# filepath is import_name.py for a module, or __init__.py for a package.
return os.path.dirname(os.path.abspath(filepath))
def find_package(import_name):
"""Finds a package and returns the prefix (or None if the package is
not installed) as well as the folder that contains the package or
module as a tuple. The package path returned is the module that would
have to be added to the pythonpath in order to make it possible to
import the module. The prefix is the path below which a UNIX like
folder structure exists (lib, share etc.).
"""
root_mod_name = import_name.split('.')[0]
loader = pkgutil.get_loader(root_mod_name)
if loader is None or import_name == '__main__':
# import name is not found, or interactive/main module
package_path = os.getcwd()
else:
# For .egg, zipimporter does not have get_filename until Python 2.7.
if hasattr(loader, 'get_filename'):
filename = loader.get_filename(root_mod_name)
elif hasattr(loader, 'archive'):
# zipimporter's loader.archive points to the .egg or .zip
# archive filename is dropped in call to dirname below.
filename = loader.archive
else:
# At least one loader is missing both get_filename and archive:
# Google App Engine's HardenedModulesHook
#
# Fall back to imports.
__import__(import_name)
filename = sys.modules[import_name].__file__
package_path = os.path.abspath(os.path.dirname(filename))
# package_path ends with __init__.py for a package
if loader.is_package(root_mod_name):
package_path = os.path.dirname(package_path)
site_parent, site_folder = os.path.split(package_path)
py_prefix = os.path.abspath(sys.prefix)
if package_path.startswith(py_prefix):
return py_prefix, package_path
elif site_folder.lower() == 'site-packages':
parent, folder = os.path.split(site_parent)
# Windows like installations
if folder.lower() == 'lib':
base_dir = parent
# UNIX like installations
elif os.path.basename(parent).lower() == 'lib':
base_dir = os.path.dirname(parent)
else:
base_dir = site_parent
return base_dir, package_path
return None, package_path
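# Illustrative sketch (hypothetical helper): find_package returns the
# installation prefix (possibly None) and the sys.path entry for a module.
def _find_package_demo():
    prefix, package_path = find_package('os')
    # package_path is the folder that would need to be on sys.path;
    # for a stdlib module it usually sits below sys.prefix.
    assert os.path.isdir(package_path)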
class locked_cached_property(object):
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value. Works like the one in Werkzeug but has a lock for
thread safety.
"""
def __init__(self, func, name=None, doc=None):
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
self.lock = RLock()
def __get__(self, obj, type=None):
if obj is None:
return self
with self.lock:
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
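# Illustrative usage sketch (hypothetical class, not in the original module):
class _CachedDemo(object):
    @locked_cached_property
    def answer(self):
        # Runs once under the lock; later reads come straight from the
        # instance __dict__ without re-invoking this function.
        return 41 + 1
# _CachedDemo().answer evaluates the body only on first access.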
class _PackageBoundObject(object):
def __init__(self, import_name, template_folder=None):
#: The name of the package or module. Do not change this once
#: it was set by the constructor.
self.import_name = import_name
#: location of the templates. `None` if templates should not be
#: exposed.
self.template_folder = template_folder
#: Where is the app root located?
self.root_path = get_root_path(self.import_name)
self._static_folder = None
self._static_url_path = None
def _get_static_folder(self):
if self._static_folder is not None:
return os.path.join(self.root_path, self._static_folder)
def _set_static_folder(self, value):
self._static_folder = value
static_folder = property(_get_static_folder, _set_static_folder)
del _get_static_folder, _set_static_folder
def _get_static_url_path(self):
if self._static_url_path is None:
if self.static_folder is None:
return None
return '/' + os.path.basename(self.static_folder)
return self._static_url_path
def _set_static_url_path(self, value):
self._static_url_path = value
static_url_path = property(_get_static_url_path, _set_static_url_path)
del _get_static_url_path, _set_static_url_path
@property
def has_static_folder(self):
"""This is `True` if the package bound object's container has a
folder named ``'static'``.
.. versionadded:: 0.5
"""
return self.static_folder is not None
@locked_cached_property
def jinja_loader(self):
"""The Jinja loader for this package bound object.
.. versionadded:: 0.5
"""
if self.template_folder is not None:
return FileSystemLoader(os.path.join(self.root_path,
self.template_folder))
def get_send_file_max_age(self, filename):
"""Provides default cache_timeout for the :func:`send_file` functions.
By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
the configuration of :data:`~flask.current_app`.
Static file functions such as :func:`send_from_directory` use this
function, and :func:`send_file` calls this function on
:data:`~flask.current_app` when the given cache_timeout is `None`. If a
cache_timeout is given in :func:`send_file`, that timeout is used;
otherwise, this method is called.
This allows subclasses to change the behavior when sending files based
on the filename. For example, to set the cache timeout for .js files
to 60 seconds::
class MyFlask(flask.Flask):
def get_send_file_max_age(self, name):
if name.lower().endswith('.js'):
return 60
return flask.Flask.get_send_file_max_age(self, name)
.. versionadded:: 0.9
"""
return current_app.config['SEND_FILE_MAX_AGE_DEFAULT']
def send_static_file(self, filename):
"""Function used internally to send static files from the static
folder to the browser.
.. versionadded:: 0.5
"""
if not self.has_static_folder:
raise RuntimeError('No static folder for this object')
# Ensure get_send_file_max_age is called in all cases.
# Here, we ensure get_send_file_max_age is called for Blueprints.
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.static_folder, filename,
cache_timeout=cache_timeout)
def open_resource(self, resource, mode='rb'):
"""Opens a resource from the application's resource folder. To see
how this works, consider the following folder structure::
/myapplication.py
/schema.sql
/static
/style.css
/templates
/layout.html
/index.html
If you want to open the `schema.sql` file you would do the
following::
with app.open_resource('schema.sql') as f:
contents = f.read()
do_something_with(contents)
:param resource: the name of the resource. To access resources within
subfolders use forward slashes as separator.
:param mode: resource file opening mode, default is 'rb'.
"""
if mode not in ('r', 'rb'):
raise ValueError('Resources can only be opened for reading')
return open(os.path.join(self.root_path, resource), mode)
| apache-2.0 | -4,993,282,996,052,697,000 | 38.803298 | 81 | 0.630338 | false |
kjschiroo/WikiChatter | test/test_indentblock.py | 1 | 5369 | import unittest
import wikichatter.indentblock as indentblock
import wikichatter.mwparsermod as mwpm
EMPTY = "\n"
LEVEL0 = "Level 0\n"
LEVEL1 = ":Level 1\n"
LEVEL2 = "::Level 2\n"
LEVEL3 = ":::Level 3\n"
LEVEL4 = "::::Level 4\n"
LIST1 = "*Level 1\n"
LIST2 = "**Level 2\n"
LIST3 = "***Level 3\n"
LIST4 = "****Level 4\n"
OUTDENT = "{{outdent}}"
OUTDENT_LEVEL = "{{outdent|5}}"
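# Illustrative sketch (hypothetical helper mirroring the assertions below):
# leading ':' and '*' characters mark wiki indentation, and
# generate_indentblock_list yields blocks carrying an `indent` depth.
def _indent_depths(text):
    return [block.indent
            for block in indentblock.generate_indentblock_list(mwpm.parse(text))]
# _indent_depths(LEVEL0 + LEVEL1 + LEVEL2) would be [0, 1, 2].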
class IndentBlockTest(unittest.TestCase):
def test_generates_list_from_basic_input(self):
text = (
LEVEL0 +
LEVEL1 +
LEVEL2 +
LEVEL3
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 4)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 1)
self.assertEqual(blocks[2].indent, 2)
self.assertEqual(blocks[3].indent, 3)
def test_generates_list_from_reverse_input(self):
text = (
LEVEL3 +
LEVEL2 +
LEVEL1 +
LEVEL0
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 4)
self.assertEqual(blocks[0].indent, 3)
self.assertEqual(blocks[1].indent, 2)
self.assertEqual(blocks[2].indent, 1)
self.assertEqual(blocks[3].indent, 0)
def test_generates_list_from_zigzag_input(self):
text = (
LEVEL0 +
LEVEL1 +
LEVEL2 +
LEVEL3 +
LEVEL2 +
LEVEL1 +
LEVEL0
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 7)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 1)
self.assertEqual(blocks[2].indent, 2)
self.assertEqual(blocks[3].indent, 3)
self.assertEqual(blocks[4].indent, 2)
self.assertEqual(blocks[5].indent, 1)
self.assertEqual(blocks[6].indent, 0)
def test_handles_outdent(self):
text = (
LEVEL0 +
LEVEL1 +
LEVEL2 +
OUTDENT + LEVEL0
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 4)
self.assertEqual(blocks[3].indent, 3)
def test_handles_double_outdent(self):
text = (
LEVEL0 +
LEVEL1 +
LEVEL2 +
OUTDENT + LEVEL0 +
LEVEL1 +
LEVEL2 +
OUTDENT + LEVEL0
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 7)
self.assertEqual(blocks[6].indent, 6)
def test_handles_triple_outdent(self):
text = (
LEVEL0 +
LEVEL1 +
OUTDENT + LEVEL0 +
LEVEL1 +
OUTDENT + LEVEL0 +
LEVEL1 +
OUTDENT + LEVEL0
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 7)
self.assertEqual(blocks[6].indent, 6)
def test_generates_list_from_basic_list_input(self):
text = (
LEVEL0 +
LIST1 +
LIST2 +
LIST3
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 4)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 1)
self.assertEqual(blocks[2].indent, 2)
self.assertEqual(blocks[3].indent, 3)
def test_breaks_same_level_apart(self):
text = (
LEVEL0 +
LIST1 +
LIST1 +
LIST2 +
LIST3
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 5)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 1)
self.assertEqual(blocks[2].indent, 1)
self.assertEqual(blocks[3].indent, 2)
self.assertEqual(blocks[4].indent, 3)
def test_grants_empty_line_previous_indent(self):
text = (
LEVEL0 +
LIST1 +
EMPTY +
LIST1 +
LIST2
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 5)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 1)
self.assertEqual(blocks[2].indent, 1)
self.assertEqual(blocks[3].indent, 1)
self.assertEqual(blocks[4].indent, 2)
def test_gives_empty_start_zero_indent(self):
text = (
EMPTY +
LEVEL0 +
LIST1 +
LIST1 +
LIST2
)
code = mwpm.parse(text)
blocks = indentblock.generate_indentblock_list(code)
self.assertEqual(len(blocks), 5)
self.assertEqual(blocks[0].indent, 0)
self.assertEqual(blocks[1].indent, 0)
self.assertEqual(blocks[2].indent, 1)
self.assertEqual(blocks[3].indent, 1)
self.assertEqual(blocks[4].indent, 2)
| mit | -9,210,680,813,746,144,000 | 25.579208 | 60 | 0.548147 | false |
sauloal/pycluster | pypy-1.9_64/lib-python/2.7/distutils/tests/test_check.py | 14 | 3546 | """Tests for distutils.command.check."""
import unittest
from test.test_support import run_unittest
from distutils.command.check import check, HAS_DOCUTILS
from distutils.tests import support
from distutils.errors import DistutilsSetupError
class CheckTestCase(support.LoggingSilencer,
support.TempdirManager,
unittest.TestCase):
def _run(self, metadata=None, **options):
if metadata is None:
metadata = {}
pkg_info, dist = self.create_dist(**metadata)
cmd = check(dist)
cmd.initialize_options()
for name, value in options.items():
setattr(cmd, name, value)
cmd.ensure_finalized()
cmd.run()
return cmd
def test_check_metadata(self):
# let's run the command with no metadata at all
        # by default, check verifies the metadata
# should have some warnings
cmd = self._run()
self.assertEqual(cmd._warnings, 2)
# now let's add the required fields
# and run it again, to make sure we don't get
        # any warnings anymore
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': 'xxx',
'name': 'xxx', 'version': 'xxx'}
cmd = self._run(metadata)
self.assertEqual(cmd._warnings, 0)
# now with the strict mode, we should
        # get an error if metadata is missing
self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1})
# and of course, no error when all metadata are present
cmd = self._run(metadata, strict=1)
self.assertEqual(cmd._warnings, 0)
def test_check_document(self):
if not HAS_DOCUTILS: # won't test without docutils
return
pkg_info, dist = self.create_dist()
cmd = check(dist)
# let's see if it detects broken rest
broken_rest = 'title\n===\n\ntest'
msgs = cmd._check_rst_data(broken_rest)
self.assertEqual(len(msgs), 1)
# and non-broken rest
rest = 'title\n=====\n\ntest'
msgs = cmd._check_rst_data(rest)
self.assertEqual(len(msgs), 0)
def test_check_restructuredtext(self):
if not HAS_DOCUTILS: # won't test without docutils
return
# let's see if it detects broken rest in long_description
broken_rest = 'title\n===\n\ntest'
pkg_info, dist = self.create_dist(long_description=broken_rest)
cmd = check(dist)
cmd.check_restructuredtext()
self.assertEqual(cmd._warnings, 1)
# let's see if we have an error with strict=1
metadata = {'url': 'xxx', 'author': 'xxx',
'author_email': 'xxx',
'name': 'xxx', 'version': 'xxx',
'long_description': broken_rest}
self.assertRaises(DistutilsSetupError, self._run, metadata,
**{'strict': 1, 'restructuredtext': 1})
# and non-broken rest
metadata['long_description'] = 'title\n=====\n\ntest'
cmd = self._run(metadata, strict=1, restructuredtext=1)
self.assertEqual(cmd._warnings, 0)
def test_check_all(self):
metadata = {'url': 'xxx', 'author': 'xxx'}
self.assertRaises(DistutilsSetupError, self._run,
{}, **{'strict': 1,
'restructuredtext': 1})
def test_suite():
return unittest.makeSuite(CheckTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| mit | 8,594,126,361,010,424,000 | 34.818182 | 78 | 0.576706 | false |
aferr/TimingCompartments | src/mem/slicc/symbols/State.py | 60 | 1754 | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.symbols.Symbol import Symbol
class State(Symbol):
def __repr__(self):
return "[State: %s]" % self.ident
__all__ = [ "State" ]
| bsd-3-clause | 3,108,453,019,531,228,700 | 50.588235 | 72 | 0.777081 | false |
htzy/bigfour | lms/djangoapps/courseware/management/commands/export_course.py | 62 | 3095 | """
A Django command that exports a course to a tar.gz file.
If <filename> is '-', it pipes the file to stdout
"""
import os
import re
import shutil
import tarfile
from tempfile import mktemp, mkdtemp
from textwrap import dedent
from path import path
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.xml_exporter import export_course_to_xml
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
class Command(BaseCommand):
"""
Export a course to XML. The output is compressed as a tar.gz file
"""
args = "<course_id> <output_filename>"
help = dedent(__doc__).strip()
def handle(self, *args, **options):
course_key, filename, pipe_results = self._parse_arguments(args)
export_course_to_tarfile(course_key, filename)
results = self._get_results(filename) if pipe_results else None
return results
def _parse_arguments(self, args):
"""Parse command line arguments"""
try:
course_key = CourseKey.from_string(args[0])
filename = args[1]
except InvalidKeyError:
raise CommandError("Unparsable course_id")
except IndexError:
raise CommandError("Insufficient arguments")
# If filename is '-' save to a temp file
pipe_results = False
if filename == '-':
filename = mktemp()
pipe_results = True
return course_key, filename, pipe_results
def _get_results(self, filename):
"""Load results from file"""
with open(filename) as f:
results = f.read()
os.remove(filename)
return results
def export_course_to_tarfile(course_key, filename):
"""Exports a course into a tar.gz file"""
tmp_dir = mkdtemp()
try:
course_dir = export_course_to_directory(course_key, tmp_dir)
compress_directory(course_dir, filename)
finally:
shutil.rmtree(tmp_dir, ignore_errors=True)
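# Illustrative usage sketch (hypothetical course id string and output path,
# not part of the original command):
def _example_export():
    course_key = CourseKey.from_string('course-v1:Org+Course+Run')
    export_course_to_tarfile(course_key, '/tmp/course.tar.gz')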
def export_course_to_directory(course_key, root_dir):
"""Export course into a directory"""
store = modulestore()
course = store.get_course(course_key)
if course is None:
raise CommandError("Invalid course_id")
# The safest characters are A-Z, a-z, 0-9, <underscore>, <period> and <hyphen>.
# We represent the first four with \w.
# TODO: Once we support courses with unicode characters, we will need to revisit this.
replacement_char = u'-'
course_dir = replacement_char.join([course.id.org, course.id.course, course.id.run])
course_dir = re.sub(r'[^\w\.\-]', replacement_char, course_dir)
export_course_to_xml(store, None, course.id, root_dir, course_dir)
export_dir = path(root_dir) / course_dir
return export_dir
def compress_directory(directory, filename):
"""Compress a directory into a tar.gz file"""
mode = 'w:gz'
name = path(directory).name
with tarfile.open(filename, mode) as tar_file:
tar_file.add(directory, arcname=name)
| agpl-3.0 | 1,497,692,776,937,601,300 | 29.048544 | 90 | 0.657512 | false |
stephanekirsch/e-colle | accueil/views.py | 1 | 8473 | #-*- coding: utf-8 -*-
from django.http import HttpResponseForbidden, Http404, HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
from accueil.models import Classe, Matiere, Colleur, Message, Destinataire, Eleve, Config, Prof
from accueil.forms import UserForm, UserProfprincipalForm, SelectMessageForm, EcrireForm, ReponseForm
from django.contrib import messages as messagees
from ecolle.settings import IP_FILTRE_ADMIN, IP_FILTRE_ADRESSES
import re
import qrcode as qr
from django.db.models import Q
from io import BytesIO
def home(request):
"""Renvoie la vue d'accueil ou, si l'utilisateur est déjà identifié, redirige vers la section adéquate"""
user=request.user
if user.is_authenticated:
if user.username=="admin":
return redirect('action_admin')
elif user.username=="Secrétariat":
return redirect('action_secret')
elif user.colleur:
return redirect('action_colleur')
elif user.eleve:
return redirect('action_eleve')
classes=Classe.objects.all()
matieres=list(Matiere.objects.all())
for i in range(len(matieres)-1,0,-1):
if matieres[i].nom.lower() == matieres[i-1].nom.lower():
matieres.pop(i)
show_admin=True
if IP_FILTRE_ADMIN:
show_admin=False
user_ip = request.META['REMOTE_ADDR']
for ip in IP_FILTRE_ADRESSES:
authenticated_by_ip = re.compile(ip).match(user_ip)
if authenticated_by_ip:
show_admin = True
break
return render(request,'accueil/home.html',{'classes':classes,'matieres':matieres,'show_admin':show_admin})
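# Illustrative sketch (hypothetical helper): the admin link above is shown
# only when the client IP matches one of the configured patterns.
def _ip_allowed(user_ip):
    return any(re.compile(ip).match(user_ip) for ip in IP_FILTRE_ADRESSES)
# _ip_allowed('192.168.0.42') is True when a pattern like '192\.168\.' is set.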
def deconnexion(request):
"""Déconnecte l'utilisateur courant et redirige vers la page d'accueil"""
logout(request)
return redirect('accueil')
@login_required(login_url='accueil')
def profil(request):
"""Renvoie la vue du profil où on peut modifier son email et/ou son mot de passe"""
user=request.user
if not user.is_authenticated:
return HttpResponseForbidden("Vous devez être connecté pour accéder à cette page")
profprincipal = bool(user.colleur and Classe.objects.filter(profprincipal=user.colleur))
if profprincipal:
classes = Classe.objects.filter(profprincipal=user.colleur)
initial = {'email':user.email}
for classe in classes:
initial["{}_groupe".format(classe.pk)] = Colleur.objects.filter(colleurprof__classe=classe,colleurprof__modifgroupe=True)
initial["{}_colloscope".format(classe.pk)] = Colleur.objects.filter(colleurprof__classe=classe,colleurprof__modifcolloscope=True)
form = UserProfprincipalForm(user.colleur,classes,request.POST or None,instance = user, initial = initial)
if form.is_valid():
form.save()
return redirect('accueil')
else:
form=UserForm(request.POST or None,instance = user)
if form.is_valid():
form.save()
return redirect('accueil')
return render(request,"accueil/profil.html",{'form':form})
@login_required(login_url='accueil')
def messages(request):
"""Renvoie vers la vue des messages"""
form = SelectMessageForm(request.user,request.POST or None)
if form.is_valid():
form.save()
return redirect('messages')
peut_composer = True
if request.user.eleve:
peut_composer = Config.objects.get_config().message_eleves
return render(request,"accueil/messages.html",{'form':form,'peut_composer':peut_composer,'nonvide':form.fields['message'].queryset.exists()})
@login_required(login_url='accueil')
def message(request,id_message):
"""Renvoie vers la vue du message dont l'id est id_message"""
message = Message.objects.filter(pk=id_message).filter(Q(auteur = request.user, hasAuteur = True) | Q(messagerecu__user = request.user))
if not message.exists():
raise Http404("Message non trouvé")
message = message.first()
repondre = True
envoye = False
    if message.auteur == request.user: # if this is a sent message
envoye = True
        if request.user.eleve: # we can reply, unless we are a student and students only have the right to reply
repondre = Config.objects.get_config().message_eleves
    else: # if this is a received message
destinataire = Destinataire.objects.get(message = message,user=request.user)
        if not destinataire.lu: # update the recipient
message.luPar += str(request.user) + "; "
message.save()
destinataire.lu=True
destinataire.save()
if request.user.eleve and destinataire.reponses and not Config.objects.get_config().message_eleves:
repondre = False
if message.auteur.username in ['admin','Secrétariat']:
repondre = False
return render(request,"accueil/message.html",{'message':message,'repondre':repondre,'envoye':envoye})
@login_required(login_url='accueil')
def ecrire(request):
"""Renvoie vers la vue d'écriture d'un message """
if request.user.eleve and not Config.objects.get_config().message_eleves:
return HttpResponseForbidden("Vous n'avez pas le droit d'écrire une message")
message = Message(auteur=request.user)
form=EcrireForm(request.user,request.POST or None,request.FILES or None, instance = message)
if form.is_valid():
form.save()
messagees.error(request, "Message envoyé")
return redirect('messages')
return render(request,"accueil/ecrire.html",{'form':form})
@login_required(login_url='accueil')
def repondre(request,message_id):
"""Renvoie vers la vue de réponse au message dont l'id est message_id"""
message = get_object_or_404(Message, pk=message_id)
    if message.auteur == request.user: # only "reply to all" is possible for a message we sent
raise Http404
destinataire = get_object_or_404(Destinataire,message=message,user=request.user)
if request.user.eleve and destinataire.reponses and not Config.objects.get_config().message_eleves or message.auteur.username in ['admin','Secrétariat']:
return HttpResponseForbidden("Vous n'avez pas le droit de répondre")
reponse = Message(auteur=request.user, listedestinataires=str(message.auteur), titre = "Re: "+ message.titre, corps = (">"+message.corps.strip().replace("\n","\n>")+"\n"))
form = ReponseForm(message, False, request.user, request.POST or None, request.FILES or None, initial = {'destinataire': reponse.listedestinataires }, instance = reponse)
if form.is_valid():
form.save()
messagees.error(request, "Message envoyé")
destinataire.reponses +=1
destinataire.save()
return redirect('messages')
return render(request,"accueil/repondre.html",{'form':form,'message':message})
@login_required(login_url='accueil')
def repondreatous(request,message_id):
"""Renvoie vers la vue de réponse au message dont l'id est message_id"""
message = Message.objects.filter(pk=message_id).filter(Q(auteur = request.user, hasAuteur = True) | Q(messagerecu__user = request.user))
if not message.exists():
raise Http404("Message non trouvé")
message = message.first()
destinataires = list(message.messagerecu.all())
    if message.auteur == request.user: # if we are replying to a message we sent
if request.user.eleve and not Config.objects.get_config().message_eleves:
return HttpResponseForbidden("Vous n'avez pas le droit de répondre")
else:
desti = get_object_or_404(Destinataire,message=message,user=request.user)
if request.user.eleve and desti.reponses and not Config.objects.get_config().message_eleves:
return HttpResponseForbidden("Vous n'avez pas le droit de répondre")
destinataires.append(Destinataire(user=message.auteur,message=None))
listedestinataires = "; ".join([str(desti.user) for desti in destinataires])
reponse = Message(auteur=request.user , listedestinataires=listedestinataires, titre = "Re: "+ message.titre, corps = (">"+message.corps.strip().replace("\n","\n>")+"\n"))
form = ReponseForm(message, destinataires, request.user, request.POST or None, request.FILES or None, initial = {"destinataire": listedestinataires}, instance = reponse)
if form.is_valid():
form.save()
messagees.error(request, "Message envoyé")
if message.auteur != request.user:
desti.reponses +=1
desti.save()
return redirect('messages')
return render(request,"accueil/repondre.html",{'form':form,'message':message})
@login_required(login_url='accueil')
def qrcode(request):
return render(request,"accueil/qrcode.html")
@login_required(login_url='accueil')
def qrcodepng(request):
url = request.build_absolute_uri('/')
img = qr.make(url)
buffer = BytesIO()
img.save(buffer,format="PNG")
response = HttpResponse(content_type='image/png')
response.write(buffer.getvalue())
buffer.close()
return response | agpl-3.0 | -153,182,821,385,879,520 | 44.831522 | 172 | 0.746205 | false |
erdincay/pyload | module/plugins/hoster/EuroshareEu.py | 6 | 2197 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class EuroshareEu(SimpleHoster):
__name__ = "EuroshareEu"
__type__ = "hoster"
__version__ = "0.30"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?euroshare\.(eu|sk|cz|hu|pl)/file/.+'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Euroshare.eu hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
INFO_PATTERN = r'<span style="float: left;"><strong>(?P<N>.+?)</strong> \((?P<S>.+?)\)</span>'
OFFLINE_PATTERN = ur'<h2>S.bor sa nena.iel</h2>|Požadovaná stránka neexistuje!'
LINK_FREE_PATTERN = r'<a href="(/file/\d+/[^/]*/download/)"><div class="downloadButton"'
DL_LIMIT_PATTERN = r'<h2>Prebieha s.ahovanie</h2>|<p>Naraz je z jednej IP adresy mo.n. s.ahova. iba jeden s.bor'
ERROR_PATTERN = r'href="/customer-zone/login/"'
URL_REPLACEMENTS = [(r"(http://[^/]*\.)(sk|cz|hu|pl)/", r"\1eu/")]
def handle_premium(self, pyfile):
if self.ERROR_PATTERN in self.html:
self.account.relogin(self.user)
self.retry(reason=_("User not logged in"))
self.link = pyfile.url.rstrip('/') + "/download/"
check = self.check_download({'login': re.compile(self.ERROR_PATTERN),
'json' : re.compile(r'\{"status":"error".*?"message":"(.*?)"')})
if check == "login" or (check == "json" and self.last_check.group(1) == "Access token expired"):
self.account.relogin(self.user)
self.retry(reason=_("Access token expired"))
elif check == "json":
self.fail(self.last_check.group(1))
def handle_free(self, pyfile):
if re.search(self.DL_LIMIT_PATTERN, self.html):
self.wait(5 * 60, 12, _("Download limit reached"))
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("LINK_FREE_PATTERN not found"))
self.link = "http://euroshare.eu%s" % m.group(1)
getInfo = create_getInfo(EuroshareEu)
| gpl-3.0 | -4,623,037,890,310,080,000 | 34.387097 | 116 | 0.574294 | false |
sysalexis/kbengine | kbe/res/scripts/common/Lib/test/test_importlib/source/test_source_encoding.py | 81 | 5396 | from .. import util
from . import util as source_util
machinery = util.import_importlib('importlib.machinery')
import codecs
import importlib.util
import re
import sys
import types
# Because sys.path gets essentially blanked, need to have unicodedata already
# imported for the parser to use.
import unicodedata
import unittest
import warnings
CODING_RE = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII)
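# Illustrative sketch (hypothetical helper): CODING_RE recognizes PEP 263
# magic encoding comments on a source line.
def _coding_of(line):
    match = CODING_RE.match(line)
    return match.group(1) if match else None
# _coding_of('# coding=koi8-r') == 'koi8-r'; _coding_of('x = 1') is None.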
class EncodingTest:
"""PEP 3120 makes UTF-8 the default encoding for source code
[default encoding].
PEP 263 specifies how that can change on a per-file basis. Either the first
or second line can contain the encoding line [encoding first line]
    [encoding second line]. If the file has the BOM marker it is considered UTF-8
implicitly [BOM]. If any encoding is specified it must be UTF-8, else it is
an error [BOM and utf-8][BOM conflict].
"""
variable = '\u00fc'
character = '\u00c9'
source_line = "{0} = '{1}'\n".format(variable, character)
module_name = '_temp'
def run_test(self, source):
with source_util.create_modules(self.module_name) as mapping:
with open(mapping[self.module_name], 'wb') as file:
file.write(source)
loader = self.machinery.SourceFileLoader(self.module_name,
mapping[self.module_name])
return self.load(loader)
def create_source(self, encoding):
encoding_line = "# coding={0}".format(encoding)
assert CODING_RE.match(encoding_line)
source_lines = [encoding_line.encode('utf-8')]
source_lines.append(self.source_line.encode(encoding))
return b'\n'.join(source_lines)
def test_non_obvious_encoding(self):
# Make sure that an encoding that has never been a standard one for
# Python works.
encoding_line = "# coding=koi8-r"
assert CODING_RE.match(encoding_line)
source = "{0}\na=42\n".format(encoding_line).encode("koi8-r")
self.run_test(source)
# [default encoding]
def test_default_encoding(self):
self.run_test(self.source_line.encode('utf-8'))
# [encoding first line]
def test_encoding_on_first_line(self):
encoding = 'Latin-1'
source = self.create_source(encoding)
self.run_test(source)
# [encoding second line]
def test_encoding_on_second_line(self):
source = b"#/usr/bin/python\n" + self.create_source('Latin-1')
self.run_test(source)
# [BOM]
def test_bom(self):
self.run_test(codecs.BOM_UTF8 + self.source_line.encode('utf-8'))
# [BOM and utf-8]
def test_bom_and_utf_8(self):
source = codecs.BOM_UTF8 + self.create_source('utf-8')
self.run_test(source)
# [BOM conflict]
def test_bom_conflict(self):
source = codecs.BOM_UTF8 + self.create_source('latin-1')
with self.assertRaises(SyntaxError):
self.run_test(source)
class EncodingTestPEP451(EncodingTest):
def load(self, loader):
module = types.ModuleType(self.module_name)
module.__spec__ = importlib.util.spec_from_loader(self.module_name, loader)
loader.exec_module(module)
return module
Frozen_EncodingTestPEP451, Source_EncodingTestPEP451 = util.test_both(
EncodingTestPEP451, machinery=machinery)
class EncodingTestPEP302(EncodingTest):
def load(self, loader):
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
return loader.load_module(self.module_name)
Frozen_EncodingTestPEP302, Source_EncodingTestPEP302 = util.test_both(
EncodingTestPEP302, machinery=machinery)
class LineEndingTest:
r"""Source written with the three types of line endings (\n, \r\n, \r)
need to be readable [cr][crlf][lf]."""
def run_test(self, line_ending):
module_name = '_temp'
source_lines = [b"a = 42", b"b = -13", b'']
source = line_ending.join(source_lines)
with source_util.create_modules(module_name) as mapping:
with open(mapping[module_name], 'wb') as file:
file.write(source)
loader = self.machinery.SourceFileLoader(module_name,
mapping[module_name])
return self.load(loader, module_name)
# [cr]
def test_cr(self):
self.run_test(b'\r')
# [crlf]
def test_crlf(self):
self.run_test(b'\r\n')
# [lf]
def test_lf(self):
self.run_test(b'\n')
class LineEndingTestPEP451(LineEndingTest):
def load(self, loader, module_name):
module = types.ModuleType(module_name)
module.__spec__ = importlib.util.spec_from_loader(module_name, loader)
loader.exec_module(module)
return module
Frozen_LineEndingTestPEP451, Source_LineEndingTestPEP451 = util.test_both(
LineEndingTestPEP451, machinery=machinery)
class LineEndingTestPEP302(LineEndingTest):
def load(self, loader, module_name):
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
return loader.load_module(module_name)
Frozen_LineEndingTestPEP302, Source_LineEndingTestPEP302 = util.test_both(
LineEndingTestPEP302, machinery=machinery)
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 | 5,622,177,556,574,542,000 | 31.70303 | 83 | 0.641772 | false |
Datera/cinder | cinder/tests/unit/volume/drivers/dell_emc/sc/test_fc.py | 3 | 48496 | # Copyright (c) 2014 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.volume.drivers.dell_emc.sc import storagecenter_api
from cinder.volume.drivers.dell_emc.sc import storagecenter_fc
# We patch these here as they are used by every test to keep
# from trying to contact a Dell Storage Center.
@mock.patch.object(storagecenter_api.HttpClient,
'__init__',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'open_connection')
@mock.patch.object(storagecenter_api.SCApi,
'close_connection')
class DellSCSanFCDriverTestCase(test.TestCase):
VOLUME = {u'instanceId': u'64702.4829',
u'scSerialNumber': 64702,
u'replicationSource': False,
u'liveVolume': False,
u'vpdId': 4831,
u'objectType': u'ScVolume',
u'index': 4829,
u'volumeFolderPath': u'dopnstktst/',
u'hostCacheEnabled': False,
u'usedByLegacyFluidFsNasVolume': False,
u'inRecycleBin': False,
u'volumeFolderIndex': 17,
u'instanceName': u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'statusMessage': u'',
u'status': u'Down',
u'storageType': {u'instanceId': u'64702.1',
u'instanceName': u'Assigned - Redundant - 2 MB',
u'objectType': u'ScStorageType'},
u'cmmDestination': False,
u'replicationDestination': False,
u'volumeFolder': {u'instanceId': u'64702.17',
u'instanceName': u'opnstktst',
u'objectType': u'ScVolumeFolder'},
u'deviceId': u'6000d31000fcbe0000000000000012df',
u'active': False,
u'portableVolumeDestination': False,
u'deleteAllowed': True,
u'name': u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'scName': u'Storage Center 64702',
u'secureDataUsed': False,
u'serialNumber': u'0000fcbe-000012df',
u'replayAllowed': False,
u'flashOptimized': False,
u'configuredSize': u'1.073741824E9 Bytes',
u'mapped': False,
u'cmmSource': False}
SCSERVER = {u'scName': u'Storage Center 64702',
u'volumeCount': 0,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 4,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'Server_21000024ff30441d',
u'instanceId': u'64702.47',
u'serverFolderPath': u'opnstktst/',
u'portType': [u'FibreChannel'],
u'type': u'Physical',
u'statusMessage': u'Only 5 of 6 expected paths are up',
u'status': u'Degraded',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.4',
u'instanceName': u'opnstktst',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Partial',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 5,
u'name': u'Server_21000024ff30441d',
u'hbaPresent': True,
u'hbaCount': 2,
u'notes': u'Created by Dell EMC Cinder Driver',
u'mapped': False,
u'operatingSystem': {u'instanceId': u'64702.38',
u'instanceName': u'Red Hat Linux 6.x',
u'objectType': u'ScServerOperatingSystem'}
}
MAPPING = {u'instanceId': u'64702.2183',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'lunUsed': [1],
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.4829',
u'instanceName':
u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'objectType': u'ScVolume'},
u'connectivity': u'Up',
u'readOnly': False,
u'objectType': u'ScMappingProfile',
u'hostCache': False,
u'mappedVia': u'Server',
u'mapCount': 2,
u'instanceName': u'4829-47',
u'lunRequested': u'N/A'
}
def setUp(self):
super(DellSCSanFCDriverTestCase, self).setUp()
        # configuration is a mock. A mock is pretty much a blank
        # slate, so the driver configuration values the tests rely on
        # are set explicitly here.
self.configuration = mock.Mock()
self.configuration.san_is_local = False
self.configuration.san_ip = "192.168.0.1"
self.configuration.san_login = "admin"
self.configuration.san_password = "pwd"
self.configuration.dell_sc_ssn = 64702
self.configuration.dell_sc_server_folder = 'opnstktst'
self.configuration.dell_sc_volume_folder = 'opnstktst'
self.configuration.dell_sc_api_port = 3033
self._context = context.get_admin_context()
self.driver = storagecenter_fc.SCFCDriver(
configuration=self.configuration)
self.driver.do_setup(None)
self.driver._stats = {'QoS_support': False,
'volume_backend_name': 'dell-1',
'free_capacity_gb': 12123,
'driver_version': '1.0.1',
'total_capacity_gb': 12388,
'reserved_percentage': 0,
'vendor_name': 'Dell',
'storage_protocol': 'FC'}
        # Start with no replication backends; the specific tests add
        # their own later. (The mocked setup would otherwise clobber this.)
self.driver.backends = None
self.driver.replication_enabled = False
self.volid = '5729f1db-4c45-416c-bc15-c8ea13a4465d'
self.volume_name = "volume" + self.volid
self.connector = {'ip': '192.168.0.77',
'host': 'cinderfc-vm',
'wwnns': ['20000024ff30441c', '20000024ff30441d'],
'initiator': 'iqn.1993-08.org.debian:01:e1b1312f9e1',
'wwpns': ['21000024ff30441c', '21000024ff30441d']}
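    # Illustrative sketch (not an original test case): the general shape of
    # the fibre-channel connection info asserted throughout this class.
    # All values here are hypothetical.
    def _example_conn_info(self):
        return {'driver_volume_type': 'fibre_channel',
                'data': {'target_lun': 1,
                         'target_discovered': True,
                         'target_wwn': [u'5000D31000FCBE3D'],
                         'initiator_target_map':
                             {u'21000024FF30441C': [u'5000D31000FCBE3D']}}}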
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'create_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
def test_initialize_connection(self,
mock_find_wwns,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_create_server,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.initialize_connection(volume, connector)
expected = {'data':
{'discard': True,
'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']},
'target_discovered': True,
'target_lun': 1,
'target_wwn':
[u'5000D31000FCBE3D', u'5000D31000FCBE35']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
        # Verify find_volume and get_volume were each called exactly once.
mock_find_volume.assert_called_once_with(fake.VOLUME_ID, None, False)
mock_get_volume.assert_called_once_with(self.VOLUME[u'instanceId'])
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'initialize_secondary')
@mock.patch.object(storagecenter_api.SCApi,
'get_live_volume')
def test_initialize_connection_live_vol(self,
mock_get_live_volume,
mock_initialize_secondary,
mock_find_wwns,
mock_is_live_volume,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102,
'secondaryRole': 'Secondary'}
mock_is_live_volume.return_value = True
mock_find_wwns.return_value = (
1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_initialize_secondary.return_value = (
1, [u'5000D31000FCBE3E', u'5000D31000FCBE36'],
{u'21000024FF30441E': [u'5000D31000FCBE36'],
u'21000024FF30441F': [u'5000D31000FCBE3E']})
mock_get_live_volume.return_value = sclivevol
res = self.driver.initialize_connection(volume, connector)
expected = {'data':
{'discard': True,
'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D'],
u'21000024FF30441E': [u'5000D31000FCBE36'],
u'21000024FF30441F': [u'5000D31000FCBE3E']},
'target_discovered': True,
'target_lun': 1,
'target_wwn': [u'5000D31000FCBE3D', u'5000D31000FCBE35',
u'5000D31000FCBE3E', u'5000D31000FCBE36']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
        # Verify find_volume and get_volume were each called exactly once.
mock_find_volume.assert_called_once_with(fake.VOLUME_ID, None, True)
mock_get_volume.assert_called_once_with(self.VOLUME[u'instanceId'])
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume')
@mock.patch.object(storagecenter_api.SCApi,
'get_volume')
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'initialize_secondary')
@mock.patch.object(storagecenter_api.SCApi,
'get_live_volume')
def test_initialize_connection_live_vol_afo(self,
mock_get_live_volume,
mock_initialize_secondary,
mock_find_wwns,
mock_is_live_volume,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID, 'provider_id': '101.101'}
scvol = {'instanceId': '102.101'}
mock_find_volume.return_value = scvol
mock_get_volume.return_value = scvol
connector = self.connector
sclivevol = {'instanceId': '101.10001',
'primaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'primaryScSerialNumber': 102,
'secondaryVolume': {'instanceId': '101.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 101,
'secondaryRole': 'Activated'}
mock_is_live_volume.return_value = True
mock_find_wwns.return_value = (
1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_get_live_volume.return_value = sclivevol
res = self.driver.initialize_connection(volume, connector)
expected = {'data':
{'discard': True,
'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']},
'target_discovered': True,
'target_lun': 1,
'target_wwn': [u'5000D31000FCBE3D', u'5000D31000FCBE35']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
        # Verify find_volume/get_volume were called once; secondary init was skipped.
self.assertFalse(mock_initialize_secondary.called)
mock_find_volume.assert_called_once_with(
fake.VOLUME_ID, '101.101', True)
mock_get_volume.assert_called_once_with('102.101')
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_no_wwns(self,
mock_find_wwns,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'create_server',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_no_server(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_create_server,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_vol_not_found(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'map_volume',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_map_vol_fail(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where map_volume returns None (no mappings)
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
def test_initialize_secondary(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
find_wwns_ret = (1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_api.find_wwns = mock.MagicMock(return_value=find_wwns_ret)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
self.assertEqual(find_wwns_ret, ret)
def test_initialize_secondary_create_server(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=None)
mock_api.create_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
find_wwns_ret = (1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_api.find_wwns = mock.MagicMock(return_value=find_wwns_ret)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
self.assertEqual(find_wwns_ret, ret)
def test_initialize_secondary_no_server(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=None)
mock_api.create_server = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
def test_initialize_secondary_map_fail(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
def test_initialize_secondary_vol_not_found(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
mock_api.get_volume = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume')
@mock.patch.object(storagecenter_api.SCApi,
'unmap_all')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
def test_force_detach(self, mock_is_live_vol, mock_unmap_all,
mock_find_volume, mock_close_connection,
mock_open_connection, mock_init):
mock_is_live_vol.return_value = False
scvol = {'instandId': '12345.1'}
mock_find_volume.return_value = scvol
mock_unmap_all.return_value = True
volume = {'id': fake.VOLUME_ID}
res = self.driver.force_detach(volume)
mock_unmap_all.assert_called_once_with(scvol)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res)
mock_unmap_all.assert_called_once_with(scvol)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume')
@mock.patch.object(storagecenter_api.SCApi,
'unmap_all')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
def test_force_detach_fail(self, mock_is_live_vol, mock_unmap_all,
mock_find_volume, mock_close_connection,
mock_open_connection, mock_init):
mock_is_live_vol.return_value = False
scvol = {'instandId': '12345.1'}
mock_find_volume.return_value = scvol
mock_unmap_all.return_value = False
volume = {'id': fake.VOLUME_ID}
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.force_detach, volume)
mock_unmap_all.assert_called_once_with(scvol)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume')
@mock.patch.object(storagecenter_api.SCApi,
'unmap_all')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'terminate_secondary')
@mock.patch.object(storagecenter_api.SCApi,
'get_live_volume')
def test_force_detach_lv(self, mock_get_live_volume,
mock_terminate_secondary, mock_is_live_vol,
mock_unmap_all, mock_find_volume,
mock_close_connection, mock_open_connection,
mock_init):
mock_is_live_vol.return_value = True
scvol = {'instandId': '12345.1'}
mock_find_volume.return_value = scvol
sclivevol = {'instandId': '12345.1.0'}
mock_get_live_volume.return_value = sclivevol
mock_terminate_secondary.return_value = True
volume = {'id': fake.VOLUME_ID}
mock_unmap_all.return_value = True
res = self.driver.force_detach(volume)
mock_unmap_all.assert_called_once_with(scvol)
expected = {'driver_volume_type': 'fibre_channel', 'data': {}}
self.assertEqual(expected, res)
self.assertEqual(1, mock_terminate_secondary.call_count)
mock_unmap_all.assert_called_once_with(scvol)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
def test_force_detach_vol_not_found(self,
mock_is_live_vol, mock_find_volume,
mock_close_connection,
mock_open_connection, mock_init):
mock_is_live_vol.return_value = False
mock_find_volume.return_value = None
volume = {'id': fake.VOLUME_ID}
res = self.driver.force_detach(volume)
expected = {'driver_volume_type': 'fibre_channel', 'data': {}}
self.assertEqual(expected, res)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
def test_terminate_connection(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'force_detach')
def test_terminate_connection_none_connector(self, mock_force_detach,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
self.driver.terminate_connection(volume, None)
mock_force_detach.assert_called_once_with(volume)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
@mock.patch.object(storagecenter_fc.SCFCDriver,
'_is_live_vol')
@mock.patch.object(storagecenter_fc.SCFCDriver,
'terminate_secondary')
def test_terminate_connection_live_vol(self,
mock_terminate_secondary,
mock_is_live_vol,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
mock_terminate_secondary.return_value = (None, [], {})
mock_is_live_vol.return_value = True
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_server(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=None)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_volume(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(None,
[],
{}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_wwns(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=False)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_failure(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(storagecenter_api.SCApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(storagecenter_api.SCApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(storagecenter_api.SCApi,
'unmap_volume',
return_value=True)
@mock.patch.object(storagecenter_api.SCApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(storagecenter_api.SCApi,
'get_volume_count',
return_value=0)
def test_terminate_connection_vol_count_zero(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where get_volume_count is zero
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'data':
{'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']},
'target_wwn':
[u'5000D31000FCBE3D', u'5000D31000FCBE35']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
def test_terminate_secondary(self,
mock_close_connection,
mock_open_connection,
mock_init):
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
mock_api.find_wwns = mock.MagicMock(return_value=(None, [], {}))
mock_api.unmap_volume = mock.MagicMock(return_value=True)
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
ret = self.driver.terminate_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
@mock.patch.object(storagecenter_api.SCApi,
'get_storage_usage',
return_value={'availableSpace': 100, 'freeSpace': 50})
def test_update_volume_stats_with_refresh(self,
mock_get_storage_usage,
mock_close_connection,
mock_open_connection,
mock_init):
stats = self.driver.get_volume_stats(True)
self.assertEqual('FC', stats['storage_protocol'])
mock_get_storage_usage.assert_called_once_with()
@mock.patch.object(storagecenter_api.SCApi,
'get_storage_usage',
return_value={'availableSpace': 100, 'freeSpace': 50})
def test_get_volume_stats_no_refresh(self,
mock_get_storage_usage,
mock_close_connection,
mock_open_connection,
mock_init):
stats = self.driver.get_volume_stats(False)
self.assertEqual('FC', stats['storage_protocol'])
mock_get_storage_usage.assert_not_called()
| apache-2.0 | 113,141,114,097,677,780 | 48.384929 | 79 | 0.464698 | false |
nayomal/bloom | bloom/generators/__init__.py | 5 | 2000 | # Software License Agreement (BSD License)
#
# Copyright (c) 2013, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
from .common import BloomGenerator
from .common import GeneratorError
from .common import list_generators
from .common import load_generator
from .common import resolve_dependencies
from .common import update_rosdep
__all__ = [
'BloomGenerator', 'GeneratorError',
'list_generators', 'load_generator',
'resolve_dependencies', 'update_rosdep'
]
| bsd-3-clause | -3,984,928,813,110,039,000 | 42.478261 | 70 | 0.7715 | false |
MikkelSchubert/paleomix | paleomix/nodes/fastqc.py | 1 | 1586 | #!/usr/bin/env python3
"""
FastQC - A quality control analysis tool for high throughput sequencing data
https://github.com/s-andrews/FastQC
"""
import os
import re
from paleomix.common.command import AtomicCmd, InputFile, OutputFile
from paleomix.common.versions import Requirement
from paleomix.node import CommandNode
# File extensions stripped by FastQC when deriving output filenames
_FASTQC_EXCLUDED_EXTENSIONS = re.compile(
r"(\.gz|\.bz2|\.txt|\.fastq|\.fq|\.csfastq|\.sam|\.bam)+$"
)
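# e.g. "sample_1.fastq.gz" -> "sample_1", matching how FastQC itself names its
# "_fastqc.html" / "_fastqc.zip" outputs, so the node can predict those paths.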
class FastQCNode(CommandNode):
def __init__(self, in_file, out_folder, options={}, dependencies=()):
out_prefix = _FASTQC_EXCLUDED_EXTENSIONS.sub("", os.path.basename(in_file))
command = AtomicCmd(
["fastqc", InputFile(in_file)],
extra_files=[
OutputFile(os.path.join(out_folder, out_prefix + "_fastqc.html")),
OutputFile(os.path.join(out_folder, out_prefix + "_fastqc.zip")),
],
requirements=[
Requirement(
name="FastQC",
call=["fastqc", "--version"],
search=r"FastQC v(\d+).(\d+).(\d+)",
),
],
)
command.merge_options(
user_options=options,
fixed_options={
"--outdir": "%(TEMP_DIR)s",
"--dir": "%(TEMP_DIR)s",
},
)
CommandNode.__init__(
self,
command=command,
description="fastQC of {}".format(in_file),
dependencies=dependencies,
)
| mit | -1,665,407,592,755,111,400 | 29.5 | 83 | 0.545397 | false |
lbartoletti/QGIS | python/plugins/processing/algs/grass7/ext/r_statistics.py | 36 | 2298 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_statistics.py
---------------
Date : September 2017
Copyright : (C) 2017 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'September 2017'
__copyright__ = '(C) 2017, Médéric Ribreux'
from qgis.core import QgsProcessingParameterString
from processing.algs.grass7.Grass7Utils import Grass7Utils
def processCommand(alg, parameters, context, feedback):
# We had a new "output" parameter
out = 'output{}'.format(alg.uniqueSuffix)
p = QgsProcessingParameterString('~output', None, out, False, False)
alg.addParameter(p)
# We need to remove all outputs
alg.processCommand(parameters, context, feedback, True)
# Then we add a new command for treating results
calcExpression = 'correctedoutput{}=@{}'.format(
alg.uniqueSuffix, out)
command = 'r.mapcalc expression="{}"'.format(calcExpression)
alg.commands.append(command)
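    # r.statistics stores its results in the category labels of the output map;
    # the '@' operator in r.mapcalc turns those labels back into cell values in
    # a raster with a predictable name, which processOutputs() then exports.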
def processOutputs(alg, parameters, context, feedback):
createOpt = alg.parameterAsString(parameters, alg.GRASS_RASTER_FORMAT_OPT, context)
metaOpt = alg.parameterAsString(parameters, alg.GRASS_RASTER_FORMAT_META, context)
# Export the results from correctedoutput
grassName = 'correctedoutput{}'.format(alg.uniqueSuffix)
fileName = alg.parameterAsOutputLayer(
parameters, 'routput', context)
outFormat = Grass7Utils.getRasterFormatFromFilename(fileName)
alg.exportRasterLayer(grassName, fileName, True,
outFormat, createOpt, metaOpt)
| gpl-2.0 | 5,256,959,600,569,611,000 | 41.444444 | 87 | 0.561082 | false |
kevint2u/audio-collector | server/node_modules/binaryjs/node_modules/binarypack/node_modules/buffercursor/node_modules/verror/node_modules/extsprintf/deps/javascriptlint/javascriptlint/util.py | 28 | 4517 | # vim: ts=4 sw=4 expandtab
import cgi
import os.path
import re
import unittest
_identifier = re.compile('^[A-Za-z_$][A-Za-z0-9_$]*$')
_contenttypes = (
'text/javascript',
'text/ecmascript',
'application/javascript',
'application/ecmascript',
'application/x-javascript',
)
class JSVersion:
def __init__(self, jsversion, is_e4x):
self.version = jsversion
self.e4x = is_e4x
def __eq__(self, other):
return self.version == other.version and \
self.e4x == other.e4x
@classmethod
def default(klass):
return klass('default', False)
@classmethod
def fromattr(klass, attr, default_version=None):
if attr.get('type'):
return klass.fromtype(attr['type'])
if attr.get('language'):
return klass.fromlanguage(attr['language'])
return default_version
@classmethod
def fromtype(klass, type_):
typestr, typeparms = cgi.parse_header(type_)
if typestr.lower() in _contenttypes:
jsversion = typeparms.get('version', 'default')
is_e4x = typeparms.get('e4x') == '1'
return klass(jsversion, is_e4x)
return None
@classmethod
def fromlanguage(klass, language):
if language.lower() in ('javascript', 'livescript', 'mocha'):
return klass.default()
# Simplistic parsing of javascript/x.y
if language.lower().startswith('javascript'):
language = language[len('javascript'):]
if language.replace('.', '').isdigit():
return klass(language, False)
return None
def isidentifier(text):
return _identifier.match(text)
def _encode_error_keyword(s):
s = s.replace('\\', '\\\\')
s = s.replace('"', '\\"')
s = s.replace("'", "\\'")
s = s.replace("\t", "\\t")
s = s.replace("\r", "\\r")
s = s.replace("\n", "\\n")
return s
def format_error(output_format, path, line, col, errname, errdesc):
errprefix = 'warning' #TODO
replacements = {
'__FILE__': path,
'__FILENAME__': os.path.basename(path),
'__LINE__': str(line+1),
'__COL__': str(col),
'__ERROR__': '%s: %s' % (errprefix, errdesc),
'__ERROR_NAME__': errname,
'__ERROR_PREFIX__': errprefix,
'__ERROR_MSG__': errdesc,
'__ERROR_MSGENC__': errdesc,
}
formatted_error = output_format
# If the output format starts with encode:, all of the keywords should be
# encoded.
if formatted_error.startswith('encode:'):
formatted_error = formatted_error[len('encode:'):]
encoded_keywords = replacements.keys()
else:
encoded_keywords = ['__ERROR_MSGENC__']
for keyword in encoded_keywords:
replacements[keyword] = _encode_error_keyword(replacements[keyword])
regexp = '|'.join(replacements.keys())
return re.sub(regexp, lambda match: replacements[match.group(0)],
formatted_error)
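# Illustrative call (values are hypothetical):
#   format_error('__FILE__(__LINE__): __ERROR__', '/tmp/a.js', 9, 0,
#                'unused_var', 'x is unused')
#   returns '/tmp/a.js(10): warning: x is unused'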
class TestUtil(unittest.TestCase):
def testIdentifier(self):
assert not isidentifier('')
assert not isidentifier('0a')
assert not isidentifier('a b')
assert isidentifier('a')
assert isidentifier('$0')
def testEncodeKeyword(self):
self.assertEquals(_encode_error_keyword(r'normal text'), 'normal text')
self.assertEquals(_encode_error_keyword(r'a\b'), r'a\\b')
self.assertEquals(_encode_error_keyword(r"identifier's"), r"identifier\'s")
self.assertEquals(_encode_error_keyword(r'"i"'), r'\"i\"')
self.assertEquals(_encode_error_keyword('a\tb'), r'a\tb')
self.assertEquals(_encode_error_keyword('a\rb'), r'a\rb')
self.assertEquals(_encode_error_keyword('a\nb'), r'a\nb')
def testFormattedError(self):
self.assertEquals(format_error('__FILE__', '__LINE__', 1, 2, 'name', 'desc'),
'__LINE__')
self.assertEquals(format_error('__FILE__', r'c:\my\file', 1, 2, 'name', 'desc'),
r'c:\my\file')
self.assertEquals(format_error('encode:__FILE__', r'c:\my\file', 1, 2, 'name', 'desc'),
r'c:\\my\\file')
self.assertEquals(format_error('__ERROR_MSGENC__', r'c:\my\file', 1, 2, 'name', r'a\b'),
r'a\\b')
self.assertEquals(format_error('encode:__ERROR_MSGENC__', r'c:\my\file', 1, 2, 'name', r'a\b'),
r'a\\b')
if __name__ == '__main__':
unittest.main()
| mit | -5,480,281,473,856,276,000 | 32.708955 | 103 | 0.566748 | false |
yfdyh000/kuma | kuma/core/managers.py | 2 | 6153 | """Extras for django-taggit
Includes:
- Handle tag namespaces (eg. tech:javascript, profile:interest:homebrewing)
TODO:
- Permissions for tag namespaces (eg. system:* is superuser-only)
- Machine tag assists
"""
from datetime import date, timedelta
from django.db import models
from django.db.models.fields import BLANK_CHOICE_DASH
from django.contrib.auth.models import AnonymousUser
from taggit.managers import TaggableManager, _TaggableManager
from taggit.models import Tag
from taggit.utils import edit_string_for_tags, require_instance_manager
class NamespacedTaggableManager(TaggableManager):
"""TaggableManager with tag namespace support"""
# HACK: Yes, I really do want to allow tags in admin change lists
flatchoices = None
# HACK: This is expensive, too, but should help with list_filter in admin
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
return [(t.id, t.name) for t in Tag.objects.all()]
def __init__(self, *args, **kwargs):
kwargs['manager'] = _NamespacedTaggableManager
super(NamespacedTaggableManager, self).__init__(*args, **kwargs)
class _NamespacedTaggableManager(_TaggableManager):
def __unicode__(self):
"""Return the list of tags as an editable string.
Expensive: Does a DB query for the tags"""
# HACK: Yes, I really do want to allow tags in admin change lists
return edit_string_for_tags(self.all())
def all_ns(self, namespace=None):
"""Fetch tags by namespace, or collate all into namespaces"""
tags = self.all()
if namespace == '':
# Empty namespace is special - just look for absence of ':'
return tags.exclude(name__contains=':')
if namespace is not None:
# Namespace requested, so generate filtered set
results = []
for tag in tags:
if tag.name.startswith(namespace):
results.append(tag)
return results
# No namespace requested, so collate into namespaces
ns_tags = {}
for tag in tags:
(ns, name) = self._parse_ns(tag)
if ns not in ns_tags:
ns_tags[ns] = [tag]
else:
ns_tags[ns].append(tag)
return ns_tags
@require_instance_manager
def add_ns(self, namespace, *tags):
"""Add tags within a namespace"""
ns_tags = self._ensure_ns(namespace, tags)
super(_NamespacedTaggableManager, self).add(*ns_tags)
@require_instance_manager
def remove_ns(self, namespace=None, *tags):
"""Remove tags within a namespace"""
ns_tags = self._ensure_ns(namespace, tags)
super(_NamespacedTaggableManager, self).remove(*ns_tags)
@require_instance_manager
def clear_ns(self, namespace=None):
"""Clear tags within a namespace"""
lookup_kwargs = self._lookup_kwargs()
lookup_kwargs['tag__name__startswith'] = namespace
self.through.objects.filter(**lookup_kwargs).delete()
@require_instance_manager
def set_ns(self, namespace=None, *tags):
"""Set tags within a namespace"""
self.clear_ns(namespace)
self.add_ns(namespace, *tags)
def _parse_ns(self, tag):
"""Extract namespace from tag name.
Namespace is tag name text up to and including the last
occurrence of ':'
"""
if (':' in tag.name):
(ns, name) = tag.name.rsplit(':', 1)
return ('%s:' % ns, name)
else:
return ('', tag.name)
def _ensure_ns(self, namespace, tags):
"""Ensure each tag name in the list starts with the given namespace"""
ns_tags = []
for t in tags:
if not t.startswith(namespace):
t = '%s%s' % (namespace, t)
ns_tags.append(t)
return ns_tags
def parse_tag_namespaces(tag_list):
"""Parse a list of tags out into a dict of lists by namespace"""
namespaces = {}
for tag in tag_list:
ns = (':' in tag) and ('%s:' % tag.rsplit(':', 1)[0]) or ''
if ns not in namespaces:
namespaces[ns] = []
namespaces[ns].append(tag)
return namespaces
def allows_tag_namespace_for(model_obj, ns, user):
"""Decide whether a tag namespace is editable by a user"""
if user.is_staff or user.is_superuser:
# Staff / superuser can manage any tag namespace
return True
if not ns.startswith('system:'):
return True
return False
def resolve_allowed_tags(model_obj, tags_curr, tags_new,
request_user=AnonymousUser):
"""Given a new set of tags and a user, build a list of allowed new tags
with changes accepted only for namespaces where editing is allowed for
the user. For disallowed namespaces, this object's current tag set will
be imposed.
No changes are made; the new tag list is just returned.
"""
# Produce namespaced sets of current and incoming new tags.
ns_tags_curr = parse_tag_namespaces(tags_curr)
ns_tags_new = parse_tag_namespaces(tags_new)
# Produce a union of all namespaces, current and new tag set
all_ns = set(ns_tags_curr.keys() + ns_tags_new.keys())
# Assemble accepted changed tag set according to permissions
tags_out = []
for ns in all_ns:
if model_obj.allows_tag_namespace_for(ns, request_user):
# If the user is allowed this namespace, apply changes by
# accepting new tags or lack thereof.
if ns in ns_tags_new:
tags_out.extend(ns_tags_new[ns])
elif ns in ns_tags_curr:
# If the user is not allowed this namespace, carry over
# existing tags or lack thereof
tags_out.extend(ns_tags_curr[ns])
return tags_out
class IPBanManager(models.Manager):
def active(self, ip):
return self.filter(ip=ip, deleted__isnull=True)
def delete_old(self, days=30):
cutoff_date = date.today() - timedelta(days=days)
old_ip_bans = self.filter(created__lte=cutoff_date)
old_ip_bans.delete()
| mpl-2.0 | 7,750,946,828,839,546,000 | 33.960227 | 78 | 0.624736 | false |
erilyth/sugar | src/jarabe/testrunner.py | 13 | 1674 | # Copyright (C) 2013, Daniel Narvaez
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import subprocess
from gi.repository import GLib
from sugar3.logger import get_logs_dir
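# Behaviour is driven by environment variables (values here are examples):
#   SUGAR_RUN_TEST  shell command to run as the test child process
#   SUGAR_TEST_LOG  optional log path (opened in append mode); when unset,
#                   <logs_dir>/test.log is used and truncated instead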
def _test_child_watch_cb(pid, condition, log_file):
if os.WIFEXITED(condition):
log_file.close()
sys.exit(os.WEXITSTATUS(condition))
def check_environment():
run_test = os.environ.get("SUGAR_RUN_TEST", None)
if run_test is not None:
log_path = os.environ.get("SUGAR_TEST_LOG", None)
if log_path is None:
log_path = os.path.join(get_logs_dir(), "test.log")
log_file = open(log_path, "w")
else:
log_file = open(log_path, "a")
test_process = subprocess.Popen(run_test,
stdout=log_file,
stderr=subprocess.STDOUT,
shell=True)
GLib.child_watch_add(test_process.pid, _test_child_watch_cb, log_file)
| gpl-2.0 | 3,178,866,983,334,097,000 | 34.617021 | 78 | 0.649343 | false |
psychopy/versions | psychopy/experiment/components/envelopegrating/__init__.py | 1 | 15422 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, print_function
from builtins import super # provides Py3-style super() using python-future
from os import path
from psychopy.experiment.components import BaseVisualComponent, Param, getInitVals, _translate
# the absolute path to the folder containing this path
thisFolder = path.abspath(path.dirname(__file__))
iconFile = path.join(thisFolder, 'envelopegrating.png')
tooltip = _translate('Envelope Grating: present cyclic textures including 2nd order envelope stimuli, prebuilt or from a '
'file')
# only use _localized values for label values, nothing functional:
_localized = {'carrier': _translate('Carrier texture'),
'ori': _translate('Carrier Orientation'),
'mask': _translate('Mask'),
'sf': _translate('Carrier spatial frequency'),
'phase': _translate('Carrier phase (in cycles)'),
'contrast': _translate('Carrier contrast'),
'texture resolution': _translate('Texture resolution'),
'interpolate': _translate('Interpolate'),
'envelope': _translate('Envelope texture'),
'envsf':_translate('Envelope spatial frequency'),
'envori':_translate('Envelope orientation'),
'envphase':_translate('Envelope phase'),
'moddepth':_translate('Envelope modulation depth'),
'power':_translate('Power to which envelope is raised'),
'beat':_translate('Is modulation a beat'),
'blendmode':_translate('OpenGL blend mode')
}
class EnvGratingComponent(BaseVisualComponent):
"""A class for presenting grating stimuli"""
def __init__(self, exp, parentName, name='env_grating', carrier='sin',
mask='None', sf=1.0, interpolate='linear',
units='from exp settings', color='$[1,1,1]', colorSpace='rgb',
pos=(0, 0), size=(0.5, 0.5), ori=0, phase=0.0, texRes='128',
envelope='sin',envsf=1.0,envori=0.0,envphase=0.0,
beat=False, power=1.0,
contrast=0.5, moddepth=1.0, blendmode='avg',
startType='time (s)', startVal=0.0,
stopType='duration (s)', stopVal=1.0,
startEstim='', durationEstim=''):
super().__init__(
exp, parentName, name=name, units=units,
color=color, colorSpace=colorSpace,
pos=pos, size=size, ori=ori,
startType=startType, startVal=startVal,
stopType=stopType, stopVal=stopVal,
startEstim=startEstim, durationEstim=durationEstim)
self.type = 'EnvGrating'
self.url = "http://www.psychopy.org/builder/components/EnvelopeGrating.html"
self.order = ['carrier', 'mask']
# params
self.params['ori'] = Param(
ori, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=_translate("Orientation of this stimulus (in deg)"),
label=_localized['ori'],categ="Carrier")
msg = _translate("The (2D) texture of the background - can be sin, sqr,"
" sinXsin... or a filename (including path)")
self.params['carrier'] = Param(
carrier, valType='str', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['carrier'], categ="Carrier")
msg = _translate("An image to define the alpha mask (ie shape)- "
"gauss, circle... or a filename (including path)")
self.params['mask'] = Param(
mask, valType='str', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['mask'], categ="Carrier")
msg = _translate("Contrast of background carrier")
self.params['contrast'] = Param(
contrast, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['contrast'], categ="Carrier")
msg = _translate("Spatial frequency of background carrier repeats across the "
"grating in 1 or 2 dimensions, e.g. 4 or [2,3]")
self.params['sf'] = Param(
sf, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['sf'], categ="Carrier")
msg = _translate("Spatial positioning of the background carrier "
"(wraps in range 0-1.0)")
self.params['phase'] = Param(
phase, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['phase'], categ="Carrier")
msg = _translate(
"Resolution of the texture for standard ones such as sin, sqr "
"etc. For most cases a value of 256 pixels will suffice")
self.params['texture resolution'] = Param(
texRes,
valType='code', allowedVals=['32', '64', '128', '256', '512'],
updates='constant', allowedUpdates=[],
hint=msg,
label=_localized['texture resolution'], categ="Carrier")
msg = _translate("How should the image be interpolated if/when "
"rescaled")
self.params['interpolate'] = Param(
interpolate, valType='str', allowedVals=['linear', 'nearest'],
updates='constant', allowedUpdates=[],
hint=msg,
label=_localized['interpolate'], categ="Carrier")
msg = _translate("The (2D) texture of the envelope - can be sin, sqr,"
" sinXsin... or a filename (including path)")
self.params['envelope'] = Param(
envelope, valType='str', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['envelope'], categ="Envelope")
msg = _translate("Spatial frequency of the modulation envelope repeats across the "
"grating in 1 or 2 dimensions, e.g. 4 or [2,3]")
self.params['envsf'] = Param(
envsf, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['envsf'], categ="Envelope")
msg = _translate("Spatial positioning of the modulation envelope"
"(wraps in range 0-1.0)")
self.params['envphase'] = Param(
envphase, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['envphase'], categ="Envelope")
msg = _translate("Orientation of the modulation envelope"
"(wraps in range 0-360)")
self.params['envori'] = Param(
envori, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['envori'], categ="Envelope")
msg = _translate("Modulation depth of modulation envelope")
self.params['moddepth'] = Param(
moddepth, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['moddepth'], categ="Envelope")
msg = _translate("Power of modulation envelope. "
"The modulator will be raised to this power "
"according to the equation S=cC*(1+mM)^power, "
"where C is the carrier and M is the modulator. "
"and c and m are there respective contrast and modulation depth. "
"Only works with AM envelopes (hence +1) in "
"equation. Power is ignored if a beat is requested. "
"This is used to obtain the square root of the modulator (power = 0.5) "
"which is useful if combining two envelope gratings "
"with different carriers and a 180 degree phase shift "
"as the resulting combined signal will not "
"have any reduction in local contrast at any point in the image. "
"This is similar - but not identical to - the method used by "
"Landy and Oruc, Vis Res 2002. "
"Note overall contrast (apparent carrier contrast) will be altered.")
self.params['power'] = Param(
            power, valType='code', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['power'], categ="Envelope")
msg = _translate("Do you want a 'beat'? [beat = carrier*envelope, "
"no beat = carrier*(1+envelope), True/False, Y/N]")
self.params['beat'] = Param(
beat, valType='str', allowedTypes=[],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['beat'], categ="Envelope")
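        # Pseudo-code summary of the two hints above (C = carrier, M = envelope,
        # c = contrast, m = moddepth):
        #   S = c * C * (1 + m * M) ** power   # beat=False (AM envelope)
        #   S = c * C * (m * M)                # beat=True (power is ignored)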
msg = _translate("OpenGL Blendmode. Avg is most common mode"
" in PsychoPy, add is useful if combining a beat with"
" the carrier image or numpy array at point of display")
self.params['blendmode'] = Param(
blendmode, valType='str', allowedVals=['avg', 'add'],
updates='constant',
allowedUpdates=['constant', 'set every repeat', 'set every frame'],
hint=msg,
label=_localized['blendmode'], categ="Basic")
def writeInitCode(self, buff):
# do we need units code?
if self.params['units'].val == 'from exp settings':
unitsStr = ""
else:
unitsStr = "units=%(units)s, " % self.params
#buff.writeIndented("from psychopy.visual.secondorder import EnvelopeGrating\n")
# replaces variable params with defaults and sets sample updating flag
inits = getInitVals(self.params)
code = ("%s = visual.EnvelopeGrating(\n" % inits['name'] +
" win=win, name='%s',%s\n" % (inits['name'], unitsStr) +
" carrier=%(carrier)s, mask=%(mask)s,\n" % inits +
" ori=%(ori)s, pos=%(pos)s, size=%(size)s,\n" % inits +
" sf=%(sf)s, phase=%(phase)s,\n" % inits +
" color=%(color)s, colorSpace=%(colorSpace)s,\n " % inits +
" opacity=%(opacity)s, contrast=%(contrast)s,\n" % inits +
" texRes=%(texture resolution)s, envelope=%(envelope)s,\n" % inits +
" envori=%(envori)s, envsf=%(envsf)s,\n" % inits +
" envphase=%(envphase)s, power=%(power)s,\n" % inits +
" moddepth=%(moddepth)s, blendmode=%(blendmode)s" %inits )
if self.params['beat'].val in ['Y','y','Yes', 'yes','True','true']:
code += ", beat=True"
elif self.params['beat'].val in ['N','n','No', 'no','False','false']:
code += ", beat=False"
else:
code += ", beat=%(beat)s" %inits
if self.params['interpolate'].val == 'linear':
code += ", interpolate=True"
else:
code += ", interpolate=False"
depth = -self.getPosInRoutine()
code += ", depth=%.1f)\n" % depth
code += "if sys.version[0]=='3' and np.min(win.gamma) == None:\n"
code += " logging.warning('Envelope grating in use with no gamma set. Unless you have hardware gamma correction the image will be distorted.')\n"
code += "elif np.min(win.gamma) < 1.01:\n"
code += " logging.warning('Envelope grating in use with window gamma <= 1.0 or no gamma set at all. Unless you have hardware gamma correction the image will be distorted.')\n"
buff.writeIndentedLines(code)
def writeRoutineStartCode(self,buff):
super().writeRoutineStartCode(buff)
#if self.params['blendmode'].val!='default':
#buff.writeIndented("__allEnvSaveBlendMode=win.blendMode #required to clean up after %(name)s\n" %self.params)
def writeFrameCode(self, buff):
"""Write the code that will be called every frame
"""
buff.writeIndented("\n")
buff.writeIndented("# *%s* updates\n" % self.params['name'])
# writes an if statement to determine whether to draw etc
self.writeStartTestCode(buff)
buff.writeIndented("%(name)s.setAutoDraw(True)\n" % self.params)
#if self.params['blendmode'].val!='default':
#buff.writeIndented("%(name)s_SaveBlendMode=win.blendMode\n" %self.params)
#buff.writeIndented("win.blendMode=%(blendmode)s\n" %self.params)
# to get out of the if statement
buff.setIndentLevel(-1, relative=True)
# test for stop (only if there was some setting for duration or stop)
if self.params['stopVal'].val not in ('', None, -1, 'None'):
# writes an if statement to determine whether to draw etc
self.writeStopTestCode(buff)
buff.writeIndented("%(name)s.setAutoDraw(False)\n" % self.params)
#if self.params['blendmode'].val!='default':
#buff.writeIndented("win.blendMode=%(name)s_SaveBlendMode\n" % self.params)
# to get out of the if statement
buff.setIndentLevel(-2, relative=True)
# set parameters that need updating every frame
# do any params need updating? (this method inherited from _base)
if self.checkNeedToUpdate('set every frame'):
code = "if %(name)s.status == STARTED: # only update if drawing\n"
buff.writeIndented(code % self.params)
buff.setIndentLevel(+1, relative=True) # to enter the if block
self.writeParamUpdates(buff, 'set every frame')
buff.setIndentLevel(-1, relative=True) # to exit the if block
def writeRoutineEndCode(self, buff):
super().writeRoutineEndCode(buff) # adds start/stop times to data
#if self.params['blendmode'].val!='default':
#buff.writeIndented("win.blendMode=__allEnvSaveBlendMode #clean up for %(name)s\n" %self.params)
| gpl-3.0 | -4,578,092,333,961,507,300 | 49.398693 | 194 | 0.567371 | false |
alown/chromium | crserverlib/server_dispatch_header.py | 4 | 1079 | # Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
sys.path.append( "../glapi_parser" )
import apiutil
apiutil.CopyrightC()
print """
/* DO NOT EDIT - THIS FILE AUTOMATICALLY GENERATED BY server_dispatch_header.py SCRIPT */
#ifndef SERVER_DISPATCH_HEADER
#define SERVER_DISPATCH_HEADER
#ifdef WINDOWS
#define SERVER_DISPATCH_APIENTRY __stdcall
#else
#define SERVER_DISPATCH_APIENTRY
#endif
#include "chromium.h"
#include "state/cr_statetypes.h"
"""
keys = apiutil.GetDispatchedFunctions("../glapi_parser/APIspec.txt")
for func_name in keys:
if ("get" in apiutil.Properties(func_name) or
apiutil.FindSpecial( "server", func_name ) or
apiutil.FindSpecial( "../state_tracker/state", func_name )):
params = apiutil.Parameters(func_name)
return_type = apiutil.ReturnType(func_name)
print '%s SERVER_DISPATCH_APIENTRY crServerDispatch%s( %s );' % (return_type, func_name, apiutil.MakeDeclarationString( params ))
print '#endif /* SERVER_DISPATCH_HEADER */'
| bsd-3-clause | 4,388,132,822,241,963,500 | 25.317073 | 131 | 0.742354 | false |
UnionEvoRobo/evofab | src/ann_runner.py | 1 | 2802 | from printer import Printer
from camera import Camera
from grid import Grid
from vector import Vector
from gridworld import GridWorld
from ann import Network
class AnnRunner(object):
"""Wraps up the gross reality of running a ``print'' using the printer simulation (controlled by a neural network)"""
camera_size = 3
def __init__(self, ideal_grid_path, cell_size, units_per_cell=10):
"""Sets up all the pieces needed to perform a print with the simulated 3d printer (controlled by the neural network). Takes in a path to an a ``goal'' or ``ideal'' grid, and constructs the GridWorld based on the dimensions of that goal grid. Understands both a ``camera'', which observes the actual world (around the print head) and an ``ideal camera'' which observes the same location but based on the ``goal grid''
"""
ideal_grid = Grid(path=ideal_grid_path, scale=cell_size)
self.ideal_grid = ideal_grid
self.gridworld = GridWorld(ideal_grid.width, ideal_grid.height, cell_size)
self.gridworld.set_ideal_grid(ideal_grid)
self.printer = Printer(10, 10, 9, self.gridworld, units_per_cell) #TODO: shouldn't be giving location values here when it's determined somewhere else. that smells a lot
self.camera = Camera(self.gridworld.grid, self.printer, self.camera_size)
self.ideal_camera = Camera(self.gridworld.ideal_grid, self.printer, self.camera_size)
def run(self, n, iterations=10000):
"""Runs a simulated print run with the printer simulation (controlled by an ANN. Starts the printer in the location provided by the ideal grid spec
"""
        # set the printer location to the starting position as defined by the ideal_grid spec
self.printer.set_position_on_grid(*self.gridworld.get_starting_position())
for i in xrange(iterations):
self.printer.setPenDown()
actual = self.camera.all_cell_values()
ideal = self.ideal_camera.all_cell_values()
pattern = [i - a for i,a in zip(actual, ideal)]
result = n.propagate(pattern)
result = [int(round(x)) for x in result]
result = ''.join(map(str, result))
self.printer.set_printer_direction(self.get_velocity(result[:2]), self.get_velocity(result[2:]))
self.printer.simulate()
self.update()
return (self.ideal_grid, self.gridworld.grid)
def update(self):
return
def get_velocity(self, instruction):
"""Translates between the output of the neural network and direction instructions for the printer. leftright and updown are translated separately"""
if instruction == "10":
return -1
elif instruction == "01":
return 1
else:
return 0
| gpl-2.0 | -4,218,987,848,206,655,500 | 50.888889 | 424 | 0.669165 | false |
globaltoken/globaltoken | test/functional/mempool_limit.py | 1 | 3174 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool limiting together/eviction with the wallet."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MempoolLimitTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxmempool=5", "-spendzeroconfchange=0"]]
def run_test(self):
txouts = gen_return_txouts()
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        self.log.info('Check that mempoolminfee is minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
txids = []
utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)
self.log.info('Create a mempool tx that will be evicted')
us0 = utxos.pop()
inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
outputs = {self.nodes[0].getnewaddress() : 0.0001}
tx = self.nodes[0].createrawtransaction(inputs, outputs)
self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
txF = self.nodes[0].fundrawtransaction(tx)
self.nodes[0].settxfee(0) # return to automatic fee selection
txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
base_fee = relayfee*100
        for i in range(3):
txids.append([])
txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
self.log.info('The tx should be evicted by now')
assert(txid not in self.nodes[0].getrawmempool())
txdata = self.nodes[0].gettransaction(txid)
assert(txdata['confirmations'] == 0) #confirmation should still be 0
        self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_greater_than(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
self.log.info('Create a mempool tx that will not pass mempoolminfee')
us0 = utxos.pop()
inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
outputs = {self.nodes[0].getnewaddress() : 0.0001}
tx = self.nodes[0].createrawtransaction(inputs, outputs)
# specifically fund this tx with a fee < mempoolminfee, >= than minrelaytxfee
txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee})
txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
assert_raises_rpc_error(-26, "mempool min fee not met, 166 < 411 (code 66)", self.nodes[0].sendrawtransaction, txFS['hex'])
if __name__ == '__main__':
MempoolLimitTest().main()
| mit | 6,556,133,842,987,770,000 | 48.59375 | 131 | 0.65564 | false |
enthought/traitsgui | examples/file_node_tree.py | 1 | 2630 | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" A file system tree. """
# Standard library imports.
from os import listdir
from os.path import basename, isdir, isfile, join
# Enthought library imports.
from enthought.pyface.tree.api import NodeManager, NodeType
class FileNode(NodeType):
""" Node type for files. """
###########################################################################
# 'NodeType' interface.
###########################################################################
def is_type_for(self, node):
""" Returns True if this node type recognizes a node. """
return isfile(node)
def allows_children(self, node):
""" Does the node allow children (ie. a folder vs a file). """
return False
def get_text(self, node):
""" Returns the label text for a node. """
return basename(node)
class FolderNode(NodeType):
""" Node type for folders. """
#########################################################################
# 'NodeType' interface.
#########################################################################
def is_type_for(self, node):
""" Returns True if this node type recognizes a node. """
return isdir(node)
def allows_children(self, node):
""" Does the node allow children (ie. a folder vs a file). """
return True
def has_children(self, node):
""" Returns True if a node has children, otherwise False. """
return len(listdir(node)) > 0
def get_children(self, node):
""" Returns the children of a node. """
return [join(node, filename) for filename in listdir(node)]
def get_text(self, node):
""" Returns the label text for a node. """
return basename(node)
# Add all types to the node manager.
node_manager = NodeManager()
node_manager.add_node_type(FileNode())
node_manager.add_node_type(FolderNode())
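# A quick smoke test using only the node types defined above (assumes the
# current working directory exists and is readable):
if __name__ == '__main__':
    folder = FolderNode()
    print(folder.is_type_for('.'))       # True for a directory
    print(folder.get_children('.')[:3])  # first few entries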
##### EOF #####################################################################
| bsd-3-clause | -4,378,289,933,988,114,400 | 29.581395 | 79 | 0.523194 | false |
vinchoi/fishplay | flask/lib/python2.7/site-packages/setuptools/ssl_support.py | 100 | 8119 | import os
import socket
import atexit
import re
from setuptools.extern.six.moves import urllib, http_client, map
import pkg_resources
from pkg_resources import ResolutionError, ExtractionError
try:
import ssl
except ImportError:
ssl = None
__all__ = [
'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
'opener_for'
]
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
""".strip().split()
try:
HTTPSHandler = urllib.request.HTTPSHandler
HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
try:
from ssl import CertificateError, match_hostname
except ImportError:
try:
from backports.ssl_match_hostname import CertificateError
from backports.ssl_match_hostname import match_hostname
except ImportError:
CertificateError = None
match_hostname = None
if not CertificateError:
class CertificateError(ValueError):
pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
"""Simple verifying handler: no auth, subclasses, timeouts, etc."""
def __init__(self, ca_bundle):
self.ca_bundle = ca_bundle
HTTPSHandler.__init__(self)
def https_open(self, req):
return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
)
class VerifyingHTTPSConn(HTTPSConnection):
"""Simple verifying connection: no auth, subclasses, timeouts, etc."""
def __init__(self, host, ca_bundle, **kw):
HTTPSConnection.__init__(self, host, **kw)
self.ca_bundle = ca_bundle
def connect(self):
sock = socket.create_connection(
(self.host, self.port), getattr(self, 'source_address', None)
)
# Handle the socket if a (proxy) tunnel is present
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
self.sock = sock
self._tunnel()
# http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
# change self.host to mean the proxy server host when tunneling is
# being used. Adapt, since we are interested in the destination
# host for the match_hostname() comparison.
actual_host = self._tunnel_host
else:
actual_host = self.host
self.sock = ssl.wrap_socket(
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
)
try:
match_hostname(self.sock.getpeercert(), actual_host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
def opener_for(ca_bundle=None):
"""Get a urlopen() replacement that uses ca_bundle for verification"""
return urllib.request.build_opener(
VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
).open
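# Example (assumes a CA bundle can be located on this system):
#   open_url = opener_for()
#   page = open_url('https://example.com/').read()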
_wincerts = None
def get_win_certfile():
global _wincerts
if _wincerts is not None:
return _wincerts.name
try:
from wincertstore import CertFile
except ImportError:
return None
class MyCertFile(CertFile):
def __init__(self, stores=(), certs=()):
CertFile.__init__(self)
for store in stores:
self.addstore(store)
self.addcerts(certs)
atexit.register(self.close)
def close(self):
try:
super(MyCertFile, self).close()
except OSError:
pass
_wincerts = MyCertFile(stores=['CA', 'ROOT'])
return _wincerts.name
def find_ca_bundle():
"""Return an existing CA bundle path, or None"""
if os.name=='nt':
return get_win_certfile()
else:
for cert_path in cert_paths:
if os.path.isfile(cert_path):
return cert_path
try:
return pkg_resources.resource_filename('certifi', 'cacert.pem')
except (ImportError, ResolutionError, ExtractionError):
return None
| gpl-3.0 | -6,532,622,208,705,187,000 | 32.411523 | 82 | 0.595024 | false |
with-git/tensorflow | tensorflow/contrib/keras/api/keras/activations/__init__.py | 11 | 1882 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras built-in activation functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Activation functions.
from tensorflow.contrib.keras.python.keras.activations import elu
from tensorflow.contrib.keras.python.keras.activations import hard_sigmoid
from tensorflow.contrib.keras.python.keras.activations import linear
from tensorflow.contrib.keras.python.keras.activations import relu
from tensorflow.contrib.keras.python.keras.activations import selu
from tensorflow.contrib.keras.python.keras.activations import sigmoid
from tensorflow.contrib.keras.python.keras.activations import softmax
from tensorflow.contrib.keras.python.keras.activations import softplus
from tensorflow.contrib.keras.python.keras.activations import softsign
from tensorflow.contrib.keras.python.keras.activations import tanh
# Auxiliary utils.
# pylint: disable=g-bad-import-order
from tensorflow.contrib.keras.python.keras.activations import deserialize
from tensorflow.contrib.keras.python.keras.activations import serialize
from tensorflow.contrib.keras.python.keras.activations import get
del absolute_import
del division
del print_function
| apache-2.0 | 1,427,858,077,124,444,200 | 44.902439 | 80 | 0.782147 | false |
pdellaert/ansible | lib/ansible/modules/cloud/hcloud/hcloud_server_type_info.py | 21 | 5682 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Hetzner Cloud GmbH <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: hcloud_server_type_info
short_description: Gather infos about the Hetzner Cloud server types.
version_added: "2.8"
description:
- Gather infos about your Hetzner Cloud server types.
- This module was called C(hcloud_server_type_facts) before Ansible 2.9, returning C(ansible_facts) and C(hcloud_server_type_facts).
Note that the M(hcloud_server_type_info) module no longer returns C(ansible_facts) and the value was renamed to C(hcloud_server_type_info)!
author:
- Lukas Kaemmerling (@LKaemmerling)
options:
id:
description:
- The ID of the server type you want to get.
type: int
name:
description:
- The name of the server type you want to get.
type: str
extends_documentation_fragment: hcloud
"""
EXAMPLES = """
- name: Gather hcloud server type infos
hcloud_server_type_info:
register: output
- name: Print the gathered infos
debug:
var: output.hcloud_server_type_info
"""
RETURN = """
hcloud_server_type_info:
description: The server type infos as list
returned: always
type: complex
contains:
id:
description: Numeric identifier of the server type
returned: always
type: int
sample: 1937415
name:
description: Name of the server type
returned: always
type: str
sample: fsn1
description:
description: Detail description of the server type
returned: always
type: str
sample: Falkenstein DC Park 1
cores:
description: Number of cpu cores a server of this type will have
returned: always
type: int
sample: 1
memory:
description: Memory a server of this type will have in GB
returned: always
type: int
sample: 1
disk:
description: Disk size a server of this type will have in GB
returned: always
type: int
sample: 25
storage_type:
description: Type of server boot drive
returned: always
type: str
sample: local
cpu_type:
description: Type of cpu
returned: always
type: str
sample: shared
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.hcloud import Hcloud
try:
from hcloud import APIException
except ImportError:
pass
class AnsibleHcloudServerTypeInfo(Hcloud):
def __init__(self, module):
Hcloud.__init__(self, module, "hcloud_server_type_info")
self.hcloud_server_type_info = None
def _prepare_result(self):
tmp = []
for server_type in self.hcloud_server_type_info:
if server_type is not None:
tmp.append({
"id": to_native(server_type.id),
"name": to_native(server_type.name),
"description": to_native(server_type.description),
"cores": server_type.cores,
"memory": server_type.memory,
"disk": server_type.disk,
"storage_type": to_native(server_type.storage_type),
"cpu_type": to_native(server_type.cpu_type)
})
return tmp
def get_server_types(self):
try:
if self.module.params.get("id") is not None:
self.hcloud_server_type_info = [self.client.server_types.get_by_id(
self.module.params.get("id")
)]
elif self.module.params.get("name") is not None:
self.hcloud_server_type_info = [self.client.server_types.get_by_name(
self.module.params.get("name")
)]
else:
self.hcloud_server_type_info = self.client.server_types.get_all()
except APIException as e:
self.module.fail_json(msg=e.message)
@staticmethod
def define_module():
return AnsibleModule(
argument_spec=dict(
id={"type": "int"},
name={"type": "str"},
**Hcloud.base_module_arguments()
),
supports_check_mode=True,
)
def main():
module = AnsibleHcloudServerTypeInfo.define_module()
is_old_facts = module._name == 'hcloud_server_type_facts'
if is_old_facts:
module.deprecate("The 'hcloud_server_type_info' module has been renamed to 'hcloud_server_type_info', "
"and the renamed one no longer returns ansible_facts", version='2.13')
hcloud = AnsibleHcloudServerTypeInfo(module)
hcloud.get_server_types()
result = hcloud.get_result()
if is_old_facts:
ansible_info = {
'hcloud_server_type_info': result['hcloud_server_type_info']
}
module.exit_json(ansible_facts=ansible_info)
else:
ansible_info = {
'hcloud_server_type_info': result['hcloud_server_type_info']
}
module.exit_json(**ansible_info)
if __name__ == "__main__":
main()
| gpl-3.0 | -4,546,811,383,487,072,000 | 29.385027 | 145 | 0.580605 | false |
thulasi-ram/django-feature-toggle | docs/conf.py | 1 | 5358 | # -*- coding: utf-8 -*-
#
# Feature Toggle documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 7 17:35:12 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Feature Toggle'
copyright = '2017, Damodharan Thulasiram'
author = 'Damodharan Thulasiram'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'about.html',
# 'navigation.html',
# 'relations.html', # needs 'show_related': True theme option to display
# 'searchbox.html',
# 'donate.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'FeatureToggledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'FeatureToggle.tex', 'Feature Toggle Documentation',
'Damodharan Thulasiram', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'featuretoggle', 'Feature Toggle Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'FeatureToggle', 'Feature Toggle Documentation',
author, 'FeatureToggle', 'One line description of project.',
'Miscellaneous'),
]
| gpl-3.0 | 6,447,481,646,597,982,000 | 29.617143 | 81 | 0.673946 | false |
marvinpinto/charlesbot-rundeck | charlesbot_rundeck/http.py | 1 | 1032 | import asyncio
import aiohttp
import logging
log = logging.getLogger(__name__)
@asyncio.coroutine
def http_post_request(url, headers): # pragma: no cover
response = yield from aiohttp.post(url, headers=headers)
if not response.status == 200:
text = yield from response.text()
log.error("URL: %s" % url)
log.error("Response status code was %s" % str(response.status))
log.error(response.headers)
log.error(text)
response.close()
return ""
return (yield from response.text())
@asyncio.coroutine
def http_get_request(url, headers, params): # pragma: no cover
response = yield from aiohttp.get(url, headers=headers, params=params)
if not response.status == 200:
text = yield from response.text()
log.error("URL: %s" % url)
log.error("Response status code was %s" % str(response.status))
log.error(response.headers)
log.error(text)
response.close()
return ""
return (yield from response.text())
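# Minimal usage sketch (illustrative only: the URL, headers and params are
# hypothetical, and the call must run inside an asyncio event loop):
#
#   @asyncio.coroutine
#   def list_projects():
#       body = yield from http_get_request(
#           'https://rundeck.example.com/api/14/projects',
#           headers={'Accept': 'application/json'},
#           params={})
#       return body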
| mit | 3,398,819,411,319,134,000 | 31.25 | 74 | 0.641473 | false |
68foxboris/enigma2-openpli-vuplus | lib/python/Screens/Menu.py | 6 | 13944 | from Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ParentalControlSetup import ProtectedScreen
from Components.Sources.List import List
from Components.ActionMap import NumberActionMap, ActionMap
from Components.Sources.StaticText import StaticText
from Components.config import configfile
from Components.PluginComponent import plugins
from Components.config import config, ConfigDictionarySet, NoSave
from Components.SystemInfo import SystemInfo
from Components.Label import Label
from Tools.BoundFunction import boundFunction
from Plugins.Plugin import PluginDescriptor
from Tools.Directories import resolveFilename, SCOPE_SKIN
from enigma import eTimer
import xml.etree.cElementTree
from Screens.Setup import Setup, getSetupTitle
# read the menu
mdom = xml.etree.cElementTree.parse(resolveFilename(SCOPE_SKIN, 'menu.xml'))
class MenuUpdater:
def __init__(self):
self.updatedMenuItems = {}
def addMenuItem(self, id, pos, text, module, screen, weight):
if not self.updatedMenuAvailable(id):
self.updatedMenuItems[id] = []
self.updatedMenuItems[id].append([text, pos, module, screen, weight])
def delMenuItem(self, id, pos, text, module, screen, weight):
self.updatedMenuItems[id].remove([text, pos, module, screen, weight])
def updatedMenuAvailable(self, id):
return id in self.updatedMenuItems
def getUpdatedMenu(self, id):
return self.updatedMenuItems[id]
menuupdater = MenuUpdater()
class MenuSummary(Screen):
pass
class Menu(Screen, ProtectedScreen):
ALLOW_SUSPEND = True
def okbuttonClick(self):
self.resetNumberKey()
selection = self["menu"].getCurrent()
if selection and selection[1]:
selection[1]()
def execText(self, text):
exec text
def runScreen(self, arg):
# arg[0] is the module (as string)
# arg[1] is Screen inside this module
# plus possible arguments, as
# string (as we want to reference
# stuff which is just imported)
# FIXME. somehow
if arg[0] != "":
exec "from " + arg[0] + " import *"
self.openDialog(*eval(arg[1]))
def nothing(self): #dummy
pass
def openDialog(self, *dialog): # in every layer needed
self.session.openWithCallback(self.menuClosed, *dialog)
def openSetup(self, dialog):
self.session.openWithCallback(self.menuClosed, Setup, dialog)
def addMenu(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
MenuTitle = _(node.get("text", "??").encode("UTF-8"))
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
x = node.get("flushConfigOnClose")
if x:
a = boundFunction(self.session.openWithCallback, self.menuClosedWithConfigFlush, Menu, node)
else:
a = boundFunction(self.session.openWithCallback, self.menuClosed, Menu, node)
#TODO add check if !empty(node.childNodes)
destList.append((MenuTitle, a, entryID, weight))
def menuClosedWithConfigFlush(self, *res):
configfile.save()
self.menuClosed(*res)
def menuClosed(self, *res):
if res and res[0]:
self.close(True)
def addItem(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
configCondition = node.get("configcondition")
if configCondition and not eval(configCondition + ".value"):
return
item_text = node.get("text", "").encode("UTF-8")
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
for x in node:
if x.tag == 'screen':
module = x.get("module")
screen = x.get("screen")
if screen is None:
screen = module
# print module, screen
if module:
module = "Screens." + module
else:
module = ""
# check for arguments. they will be appended to the
# openDialog call
args = x.text or ""
screen += ", " + args
destList.append((_(item_text or "??"), boundFunction(self.runScreen, (module, screen)), entryID, weight))
return
elif x.tag == 'code':
destList.append((_(item_text or "??"), boundFunction(self.execText, x.text), entryID, weight))
return
elif x.tag == 'setup':
id = x.get("id")
if item_text == "":
item_text = _(getSetupTitle(id))
else:
item_text = _(item_text)
destList.append((item_text, boundFunction(self.openSetup, id), entryID, weight))
return
destList.append((item_text, self.nothing, entryID, weight))
def sortByName(self, listentry):
return listentry[0].lower()
def __init__(self, session, parent):
self.parentmenu = parent
Screen.__init__(self, session)
self["menu"] = List([])
self["menu"].enableWrapAround = True
self.createMenuList()
# for the skin: first try a menu_<menuID>, then Menu
self.skinName = [ ]
if self.menuID:
self.skinName.append("menu_" + self.menuID)
self.skinName.append("Menu")
ProtectedScreen.__init__(self)
self["actions"] = NumberActionMap(["OkCancelActions", "MenuActions", "NumberActions"],
{
"ok": self.okbuttonClick,
"cancel": self.closeNonRecursive,
"menu": self.closeRecursive,
"0": self.keyNumberGlobal,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal
})
if config.usage.menu_sort_mode.value == "user":
self["EditActions"] = ActionMap(["ColorActions"],
{
"blue": self.keyBlue,
})
title = parent.get("title", "").encode("UTF-8") or None
title = title and _(title) or _(parent.get("text", "").encode("UTF-8"))
title = self.__class__.__name__ == "MenuSort" and _("Menusort (%s)") % title or title
self["title"] = StaticText(title)
self.setScreenPathMode(True)
self.setTitle(title)
self.number = 0
self.nextNumberTimer = eTimer()
self.nextNumberTimer.callback.append(self.okbuttonClick)
def createMenuList(self):
self.list = []
		self.menuID = None
		count = 0
for x in self.parentmenu: #walk through the actual nodelist
if not x.tag:
continue
if x.tag == 'item':
item_level = int(x.get("level", 0))
if item_level <= config.usage.setup_level.index:
self.addItem(self.list, x)
count += 1
elif x.tag == 'menu':
item_level = int(x.get("level", 0))
if item_level <= config.usage.setup_level.index:
self.addMenu(self.list, x)
count += 1
elif x.tag == "id":
self.menuID = x.get("val")
if self.menuID:
# menuupdater?
if menuupdater.updatedMenuAvailable(self.menuID):
for x in menuupdater.getUpdatedMenu(self.menuID):
if x[1] == count:
self.list.append((x[0], boundFunction(self.runScreen, (x[2], x[3] + ", ")), x[4]))
count += 1
if self.menuID:
# plugins
for l in plugins.getPluginsForMenu(self.menuID):
# check if a plugin overrides an existing menu
plugin_menuid = l[2]
for x in self.list:
if x[2] == plugin_menuid:
self.list.remove(x)
break
self.list.append((l[0], boundFunction(l[1], self.session, close=self.close), l[2], l[3] or 50))
if config.usage.menu_sort_mode.value == "user" and self.menuID == "mainmenu":
plugin_list = []
id_list = []
for l in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU ,PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
l.id = (l.name.lower()).replace(' ','_')
if l.id not in id_list:
id_list.append(l.id)
plugin_list.append((l.name, boundFunction(l.__call__, self.session), l.id, 200))
if self.menuID is not None and config.usage.menu_sort_mode.value == "user":
self.sub_menu_sort = NoSave(ConfigDictionarySet())
self.sub_menu_sort.value = config.usage.menu_sort_weight.getConfigValue(self.menuID, "submenu") or {}
idx = 0
for x in self.list:
entry = list(self.list.pop(idx))
m_weight = self.sub_menu_sort.getConfigValue(entry[2], "sort") or entry[3]
entry.append(m_weight)
self.list.insert(idx, tuple(entry))
self.sub_menu_sort.changeConfigValue(entry[2], "sort", m_weight)
idx += 1
self.full_list = list(self.list)
if config.usage.menu_sort_mode.value == "a_z":
# Sort by Name
self.list.sort(key=self.sortByName)
elif config.usage.menu_sort_mode.value == "user":
self.hide_show_entries()
else:
# Sort by Weight
self.list.sort(key=lambda x: int(x[3]))
if config.usage.menu_show_numbers.value:
self.list = [(str(x[0] + 1) + " " +x[1][0], x[1][1], x[1][2]) for x in enumerate(self.list)]
self["menu"].updateList(self.list)
def keyNumberGlobal(self, number):
self.number = self.number * 10 + number
if self.number and self.number <= len(self["menu"].list):
self["menu"].setIndex(self.number - 1)
if len(self["menu"].list) < 10 or self.number >= 10:
self.okbuttonClick()
else:
self.nextNumberTimer.start(1500, True)
else:
self.number = 0
def resetNumberKey(self):
self.nextNumberTimer.stop()
self.number = 0
def closeNonRecursive(self):
self.resetNumberKey()
self.close(False)
def closeRecursive(self):
self.resetNumberKey()
self.close(True)
def createSummary(self):
return MenuSummary
def isProtected(self):
if config.ParentalControl.setuppinactive.value:
if config.ParentalControl.config_sections.main_menu.value and not(hasattr(self.session, 'infobar') and self.session.infobar is None):
return self.menuID == "mainmenu"
elif config.ParentalControl.config_sections.configuration.value and self.menuID == "setup":
return True
elif config.ParentalControl.config_sections.timer_menu.value and self.menuID == "timermenu":
return True
elif config.ParentalControl.config_sections.standby_menu.value and self.menuID == "shutdown":
return True
def keyBlue(self):
if config.usage.menu_sort_mode.value == "user":
self.session.openWithCallback(self.menuSortCallBack, MenuSort, self.parentmenu)
def menuSortCallBack(self, key=False):
self.createMenuList()
def keyCancel(self):
self.closeNonRecursive()
def hide_show_entries(self):
self.list = []
for entry in self.full_list:
if not self.sub_menu_sort.getConfigValue(entry[2], "hidden"):
self.list.append(entry)
if not self.list:
self.list.append(('',None,'dummy','10',10))
self.list.sort(key=lambda listweight : int(listweight[4]))
class MenuSort(Menu):
def __init__(self, session, parent):
self["key_red"] = Label(_("Exit"))
self["key_green"] = Label(_("Save changes"))
self["key_yellow"] = Label(_("Toggle show/hide"))
self["key_blue"] = Label(_("Reset order (All)"))
self.somethingChanged = False
Menu.__init__(self, session, parent)
self.skinName = "MenuSort"
self["menu"].onSelectionChanged.append(self.selectionChanged)
self["MoveActions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"moveUp": boundFunction(self.moveChoosen, -1),
"moveDown": boundFunction(self.moveChoosen, +1),
}, -1
)
self["EditActions"] = ActionMap(["ColorActions"],
{
"red": self.closeMenuSort,
"green": self.keySave,
"yellow": self.keyToggleShowHide,
"blue": self.resetSortOrder,
})
self.onLayoutFinish.append(self.selectionChanged)
def isProtected(self):
return config.ParentalControl.setuppinactive.value and config.ParentalControl.config_sections.menu_sort.value
def resetSortOrder(self, key = None):
config.usage.menu_sort_weight.value = { "mainmenu" : {"submenu" : {} }}
config.usage.menu_sort_weight.save()
self.createMenuList()
def hide_show_entries(self):
self.list = list(self.full_list)
if not self.list:
self.list.append(('',None,'dummy','10',10))
self.list.sort(key=lambda listweight : int(listweight[4]))
def selectionChanged(self):
selection = self["menu"].getCurrent()[2]
if self.sub_menu_sort.getConfigValue(selection, "hidden"):
self["key_yellow"].setText(_("show"))
else:
self["key_yellow"].setText(_("hide"))
def keySave(self):
if self.somethingChanged:
i = 10
idx = 0
for x in self.list:
self.sub_menu_sort.changeConfigValue(x[2], "sort", i)
if len(x) >= 5:
entry = list(x)
entry[4] = i
entry = tuple(entry)
self.list.pop(idx)
self.list.insert(idx, entry)
i += 10
idx += 1
config.usage.menu_sort_weight.changeConfigValue(self.menuID, "submenu", self.sub_menu_sort.value)
config.usage.menu_sort_weight.save()
self.close()
def closeNonRecursive(self):
self.closeMenuSort()
def closeRecursive(self):
self.closeMenuSort()
def closeMenuSort(self):
if self.somethingChanged:
self.session.openWithCallback(self.cancelConfirm, MessageBox, _("Really close without saving settings?"))
else:
self.close()
def cancelConfirm(self, result):
if result:
config.usage.menu_sort_weight.cancel()
self.close()
def okbuttonClick(self):
self.keyToggleShowHide()
def keyToggleShowHide(self):
self.somethingChanged = True
selection = self["menu"].getCurrent()[2]
if self.sub_menu_sort.getConfigValue(selection, "hidden"):
self.sub_menu_sort.removeConfigValue(selection, "hidden")
self["key_yellow"].setText(_("hide"))
else:
self.sub_menu_sort.changeConfigValue(selection, "hidden", 1)
self["key_yellow"].setText(_("show"))
def moveChoosen(self, direction):
self.somethingChanged = True
currentIndex = self["menu"].getSelectedIndex()
swapIndex = (currentIndex + direction) % len(self["menu"].list)
self["menu"].list[currentIndex], self["menu"].list[swapIndex] = self["menu"].list[swapIndex], self["menu"].list[currentIndex]
self["menu"].updateList(self["menu"].list)
if direction > 0:
self["menu"].down()
else:
self["menu"].up()
class MainMenu(Menu):
#add file load functions for the xml-file
def __init__(self, *x):
self.skinName = "Menu"
Menu.__init__(self, *x)
| gpl-2.0 | 8,475,415,589,029,347,000 | 30.125 | 141 | 0.684022 | false |
elingg/tensorflow | tensorflow/contrib/tensor_forest/client/__init__.py | 164 | 1043 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Random forest implementation in tensorflow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.contrib.tensor_forest.client import eval_metrics
from tensorflow.contrib.tensor_forest.client import random_forest
# pylint: enable=unused-import
| apache-2.0 | -2,657,751,034,958,576,000 | 44.347826 | 80 | 0.718121 | false |
azunite/chrome_build | tests/gclient_utils_test.py | 44 | 7351 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import StringIO
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support.super_mox import SuperMoxTestBase
from testing_support import trial_dir
import gclient_utils
import subprocess2
class GclientUtilBase(SuperMoxTestBase):
def setUp(self):
super(GclientUtilBase, self).setUp()
gclient_utils.sys.stdout.flush = lambda: None
self.mox.StubOutWithMock(subprocess2, 'Popen')
self.mox.StubOutWithMock(subprocess2, 'communicate')
class CheckCallAndFilterTestCase(GclientUtilBase):
class ProcessIdMock(object):
def __init__(self, test_string):
self.stdout = StringIO.StringIO(test_string)
self.pid = 9284
# pylint: disable=R0201
def wait(self):
return 0
def _inner(self, args, test_string):
cwd = 'bleh'
gclient_utils.sys.stdout.write(
'\n________ running \'boo foo bar\' in \'bleh\'\n')
for i in test_string:
gclient_utils.sys.stdout.write(i)
# pylint: disable=E1101
subprocess2.Popen(
args,
cwd=cwd,
stdout=subprocess2.PIPE,
stderr=subprocess2.STDOUT,
bufsize=0).AndReturn(self.ProcessIdMock(test_string))
os.getcwd()
self.mox.ReplayAll()
compiled_pattern = gclient_utils.re.compile(r'a(.*)b')
line_list = []
capture_list = []
def FilterLines(line):
line_list.append(line)
assert isinstance(line, str), type(line)
match = compiled_pattern.search(line)
if match:
capture_list.append(match.group(1))
gclient_utils.CheckCallAndFilterAndHeader(
args, cwd=cwd, always=True, filter_fn=FilterLines)
self.assertEquals(line_list, ['ahah', 'accb', 'allo', 'addb'])
self.assertEquals(capture_list, ['cc', 'dd'])
def testCheckCallAndFilter(self):
args = ['boo', 'foo', 'bar']
test_string = 'ahah\naccb\nallo\naddb\n'
self._inner(args, test_string)
self.checkstdout('\n________ running \'boo foo bar\' in \'bleh\'\n'
'ahah\naccb\nallo\naddb\n\n'
'________ running \'boo foo bar\' in \'bleh\'\nahah\naccb\nallo\naddb'
'\n')
def testNoLF(self):
# Exactly as testCheckCallAndFilterAndHeader without trailing \n
args = ['boo', 'foo', 'bar']
test_string = 'ahah\naccb\nallo\naddb'
self._inner(args, test_string)
self.checkstdout('\n________ running \'boo foo bar\' in \'bleh\'\n'
'ahah\naccb\nallo\naddb\n'
'________ running \'boo foo bar\' in \'bleh\'\nahah\naccb\nallo\naddb')
class SplitUrlRevisionTestCase(GclientUtilBase):
def testSSHUrl(self):
url = "ssh://[email protected]/test.git"
rev = "ac345e52dc"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
url = "ssh://example.com/test.git"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
url = "ssh://example.com/git/test.git"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
rev = "test-stable"
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
url = "ssh://[email protected]/~/test.git"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
url = "ssh://[email protected]/~username/test.git"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
url = "[email protected]:dart-lang/spark.git"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
def testSVNUrl(self):
url = "svn://example.com/test"
rev = "ac345e52dc"
out_url, out_rev = gclient_utils.SplitUrlRevision(url)
self.assertEquals(out_rev, None)
self.assertEquals(out_url, url)
out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
self.assertEquals(out_rev, rev)
self.assertEquals(out_url, url)
class GClientUtilsTest(trial_dir.TestCase):
def testHardToDelete(self):
# Use the fact that tearDown will delete the directory to make it hard to do
# so.
l1 = os.path.join(self.root_dir, 'l1')
l2 = os.path.join(l1, 'l2')
l3 = os.path.join(l2, 'l3')
f3 = os.path.join(l3, 'f3')
os.mkdir(l1)
os.mkdir(l2)
os.mkdir(l3)
gclient_utils.FileWrite(f3, 'foo')
os.chmod(f3, 0)
os.chmod(l3, 0)
os.chmod(l2, 0)
os.chmod(l1, 0)
def testUpgradeToHttps(self):
values = [
['', ''],
[None, None],
['foo', 'https://foo'],
['http://foo', 'https://foo'],
['foo/', 'https://foo/'],
['ssh-svn://foo', 'ssh-svn://foo'],
['ssh-svn://foo/bar/', 'ssh-svn://foo/bar/'],
['codereview.chromium.org', 'https://codereview.chromium.org'],
['codereview.chromium.org/', 'https://codereview.chromium.org/'],
['http://foo:10000', 'http://foo:10000'],
['http://foo:10000/bar', 'http://foo:10000/bar'],
['foo:10000', 'http://foo:10000'],
['foo:', 'https://foo:'],
]
for content, expected in values:
self.assertEquals(
expected, gclient_utils.UpgradeToHttps(content))
def testParseCodereviewSettingsContent(self):
values = [
['# bleh\n', {}],
['\t# foo : bar\n', {}],
['Foo:bar', {'Foo': 'bar'}],
['Foo:bar:baz\n', {'Foo': 'bar:baz'}],
[' Foo : bar ', {'Foo': 'bar'}],
[' Foo : bar \n', {'Foo': 'bar'}],
['a:b\n\rc:d\re:f', {'a': 'b', 'c': 'd', 'e': 'f'}],
['an_url:http://value/', {'an_url': 'http://value/'}],
[
'CODE_REVIEW_SERVER : http://r/s',
{'CODE_REVIEW_SERVER': 'https://r/s'}
],
['VIEW_VC:http://r/s', {'VIEW_VC': 'https://r/s'}],
]
for content, expected in values:
self.assertEquals(
expected, gclient_utils.ParseCodereviewSettingsContent(content))
if __name__ == '__main__':
import unittest
unittest.main()
# vim: ts=2:sw=2:tw=80:et:
| bsd-3-clause | 1,080,130,835,526,816,100 | 34.341346 | 80 | 0.621956 | false |
anryko/ansible | lib/ansible/modules/cloud/azure/azure_rm_mariadbfirewallrule.py | 40 | 9856 | #!/usr/bin/python
#
# Copyright (c) 2018 Zim Kalinowski, <[email protected]>
# Copyright (c) 2019 Matti Ranta, (@techknowlogick)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mariadbfirewallrule
version_added: "2.8"
short_description: Manage MariaDB firewall rule instance
description:
- Create, update and delete instance of MariaDB firewall rule.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
server_name:
description:
- The name of the server.
required: True
name:
description:
- The name of the MariaDB firewall rule.
required: True
start_ip_address:
description:
- The start IP address of the MariaDB firewall rule. Must be IPv4 format.
end_ip_address:
description:
- The end IP address of the MariaDB firewall rule. Must be IPv4 format.
state:
description:
- Assert the state of the MariaDB firewall rule. Use C(present) to create or update a rule and C(absent) to ensure it is not present.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
- Matti Ranta (@techknowlogick)
'''
EXAMPLES = '''
- name: Create (or update) MariaDB firewall rule
azure_rm_mariadbfirewallrule:
resource_group: myResourceGroup
server_name: testserver
name: rule1
start_ip_address: 10.0.0.17
end_ip_address: 10.0.0.20
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforMariaDB/servers/testserver/fire
wallRules/rule1"
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from azure.mgmt.rdbms.mariadb import MariaDBManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
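# Poor man's enum: exec_module() compares the requested state against the
# existing rule and picks exactly one of these actions.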
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMMariaDbFirewallRule(AzureRMModuleBase):
"""Configuration class for an Azure RM MariaDB firewall rule resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
start_ip_address=dict(
type='str'
),
end_ip_address=dict(
type='str'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.server_name = None
self.name = None
self.start_ip_address = None
self.end_ip_address = None
self.results = dict(changed=False)
self.state = None
self.to_do = Actions.NoAction
super(AzureRMMariaDbFirewallRule, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=False)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()):
if hasattr(self, key):
setattr(self, key, kwargs[key])
old_response = None
response = None
resource_group = self.get_resource_group(self.resource_group)
old_response = self.get_firewallrule()
if not old_response:
self.log("MariaDB firewall rule instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("MariaDB firewall rule instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
self.log("Need to check if MariaDB firewall rule instance has to be deleted or may be updated")
if (self.start_ip_address is not None) and (self.start_ip_address != old_response['start_ip_address']):
self.to_do = Actions.Update
if (self.end_ip_address is not None) and (self.end_ip_address != old_response['end_ip_address']):
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the MariaDB firewall rule instance")
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_firewallrule()
if not old_response:
self.results['changed'] = True
else:
self.results['changed'] = old_response.__ne__(response)
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("MariaDB firewall rule instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_firewallrule()
# make sure instance is actually deleted, for some Azure resources, instance is hanging around
# for some time after deletion -- this should be really fixed in Azure
while self.get_firewallrule():
time.sleep(20)
else:
self.log("MariaDB firewall rule instance unchanged")
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
return self.results
def create_update_firewallrule(self):
'''
Creates or updates MariaDB firewall rule with the specified configuration.
:return: deserialized MariaDB firewall rule instance state dictionary
'''
self.log("Creating / Updating the MariaDB firewall rule instance {0}".format(self.name))
try:
response = self.mariadb_client.firewall_rules.create_or_update(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name,
start_ip_address=self.start_ip_address,
end_ip_address=self.end_ip_address)
if isinstance(response, LROPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.log('Error attempting to create the MariaDB firewall rule instance.')
self.fail("Error creating the MariaDB firewall rule instance: {0}".format(str(exc)))
return response.as_dict()
def delete_firewallrule(self):
'''
Deletes specified MariaDB firewall rule instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the MariaDB firewall rule instance {0}".format(self.name))
try:
response = self.mariadb_client.firewall_rules.delete(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
except CloudError as e:
self.log('Error attempting to delete the MariaDB firewall rule instance.')
self.fail("Error deleting the MariaDB firewall rule instance: {0}".format(str(e)))
return True
def get_firewallrule(self):
'''
Gets the properties of the specified MariaDB firewall rule.
:return: deserialized MariaDB firewall rule instance state dictionary
'''
self.log("Checking if the MariaDB firewall rule instance {0} is present".format(self.name))
found = False
try:
response = self.mariadb_client.firewall_rules.get(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
found = True
self.log("Response : {0}".format(response))
self.log("MariaDB firewall rule instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the MariaDB firewall rule instance.')
if found is True:
return response.as_dict()
return False
def main():
"""Main execution"""
AzureRMMariaDbFirewallRule()
if __name__ == '__main__':
main()
| gpl-3.0 | -4,423,914,319,902,104,000 | 34.581227 | 152 | 0.567573 | false |
jramos/p2pool | SOAPpy/GSIServer.py | 289 | 5238 | from __future__ import nested_scopes
"""
GSIServer - Contributed by Ivan R. Judson <[email protected]>
################################################################################
#
# SOAPpy - Cayce Ullman ([email protected])
# Brian Matthews ([email protected])
# Gregory Warnes ([email protected])
# Christopher Blunck ([email protected])
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: GSIServer.py 1468 2008-05-24 01:55:33Z warnes $'
from version import __version__
#import xml.sax
import re
import socket
import sys
import SocketServer
from types import *
import BaseHTTPServer
# SOAPpy modules
from Parser import parseSOAPRPC
from Config import SOAPConfig
from Types import faultType, voidType, simplify
from NS import NS
from SOAPBuilder import buildSOAP
from Utilities import debugHeader, debugFooter
try: from M2Crypto import SSL
except: pass
#####
from Server import *
from pyGlobus.io import GSITCPSocketServer, ThreadingGSITCPSocketServer
from pyGlobus import ioc
def GSIConfig():
config = SOAPConfig()
config.channel_mode = ioc.GLOBUS_IO_SECURE_CHANNEL_MODE_GSI_WRAP
config.delegation_mode = ioc.GLOBUS_IO_SECURE_DELEGATION_MODE_FULL_PROXY
config.tcpAttr = None
config.authMethod = "_authorize"
return config
Config = GSIConfig()
class GSISOAPServer(GSITCPSocketServer, SOAPServerBase):
def __init__(self, addr = ('localhost', 8000),
RequestHandler = SOAPRequestHandler, log = 0,
encoding = 'UTF-8', config = Config, namespace = None):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.namespace = namespace
self.objmap = {}
self.funcmap = {}
self.encoding = encoding
self.config = config
self.log = log
self.allow_reuse_address= 1
GSITCPSocketServer.__init__(self, addr, RequestHandler,
self.config.channel_mode,
self.config.delegation_mode,
tcpAttr = self.config.tcpAttr)
def get_request(self):
sock, addr = GSITCPSocketServer.get_request(self)
return sock, addr
class ThreadingGSISOAPServer(ThreadingGSITCPSocketServer, SOAPServerBase):
def __init__(self, addr = ('localhost', 8000),
RequestHandler = SOAPRequestHandler, log = 0,
encoding = 'UTF-8', config = Config, namespace = None):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.namespace = namespace
self.objmap = {}
self.funcmap = {}
self.encoding = encoding
self.config = config
self.log = log
self.allow_reuse_address= 1
ThreadingGSITCPSocketServer.__init__(self, addr, RequestHandler,
self.config.channel_mode,
self.config.delegation_mode,
tcpAttr = self.config.tcpAttr)
def get_request(self):
sock, addr = ThreadingGSITCPSocketServer.get_request(self)
return sock, addr
| gpl-3.0 | 8,312,064,694,743,674,000 | 35.629371 | 80 | 0.613402 | false |
atmark-techno/atmark-dist | user/python/Lib/knee.py | 4 | 3522 | """An Python re-implementation of hierarchical module import.
This code is intended to be read, not executed. However, it does work
-- all you need to do to enable it is "import knee".
(The name is a pun on the klunkier predecessor of this module, "ni".)
"""
import sys, imp, __builtin__, string
# Replacement for __import__()
def import_hook(name, globals=None, locals=None, fromlist=None):
parent = determine_parent(globals)
q, tail = find_head_package(parent, name)
m = load_tail(q, tail)
if not fromlist:
return q
if hasattr(m, "__path__"):
ensure_fromlist(m, fromlist)
return m
def determine_parent(globals):
if not globals or not globals.has_key("__name__"):
return None
pname = globals['__name__']
if globals.has_key("__path__"):
parent = sys.modules[pname]
assert globals is parent.__dict__
return parent
if '.' in pname:
i = string.rfind(pname, '.')
pname = pname[:i]
parent = sys.modules[pname]
assert parent.__name__ == pname
return parent
return None
def find_head_package(parent, name):
if '.' in name:
i = string.find(name, '.')
head = name[:i]
tail = name[i+1:]
else:
head = name
tail = ""
if parent:
qname = "%s.%s" % (parent.__name__, head)
else:
qname = head
q = import_module(head, qname, parent)
if q: return q, tail
if parent:
qname = head
parent = None
q = import_module(head, qname, parent)
if q: return q, tail
raise ImportError, "No module named " + qname
def load_tail(q, tail):
m = q
while tail:
i = string.find(tail, '.')
if i < 0: i = len(tail)
head, tail = tail[:i], tail[i+1:]
mname = "%s.%s" % (m.__name__, head)
m = import_module(head, mname, m)
if not m:
raise ImportError, "No module named " + mname
return m
def ensure_fromlist(m, fromlist, recursive=0):
for sub in fromlist:
if sub == "*":
if not recursive:
try:
all = m.__all__
except AttributeError:
pass
else:
ensure_fromlist(m, all, 1)
continue
if sub != "*" and not hasattr(m, sub):
subname = "%s.%s" % (m.__name__, sub)
submod = import_module(sub, subname, m)
if not submod:
raise ImportError, "No module named " + subname
def import_module(partname, fqname, parent):
try:
return sys.modules[fqname]
except KeyError:
pass
try:
fp, pathname, stuff = imp.find_module(partname,
parent and parent.__path__)
except ImportError:
return None
try:
m = imp.load_module(fqname, fp, pathname, stuff)
finally:
if fp: fp.close()
if parent:
setattr(parent, partname, m)
return m
# Replacement for reload()
def reload_hook(module):
name = module.__name__
if '.' not in name:
return import_module(name, name, None)
i = string.rfind(name, '.')
pname = name[:i]
parent = sys.modules[pname]
return import_module(name[i+1:], name, parent)
# Save the original hooks
original_import = __builtin__.__import__
original_reload = __builtin__.reload
# Now install our hooks
__builtin__.__import__ = import_hook
__builtin__.reload = reload_hook
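# Example (hypothetical package layout; importing this module is all that is
# needed, since the hooks are installed above at import time):
#
#   import knee                # replaces __import__ and reload
#   import package.module      # now resolved through import_hook()
#   reload(package.module)     # now resolved through reload_hook()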
| gpl-2.0 | -7,560,908,602,098,218,000 | 26.952381 | 73 | 0.549972 | false |
google/telluride_decoding | test/scaled_lda_test.py | 1 | 6747 | # Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for telluride_decoding.scaled_lda."""
import os
from absl.testing import absltest
import matplotlib.pyplot as plt
import numpy as np
from telluride_decoding import scaled_lda
class ScaledLdaTest(absltest.TestCase):
def test_one_dimensional_data(self):
num_points = 1000
d1 = np.random.randn(num_points,) - 5
d2 = np.random.randn(num_points,) + 5
lda = scaled_lda.ScaledLinearDiscriminantAnalysis()
lda.fit_two_classes(d1, d2)
d1_transformed = lda.transform(d1)
self.assertAlmostEqual(np.mean(d1_transformed), 0)
d2_transformed = lda.transform(d2)
self.assertAlmostEqual(np.mean(d2_transformed), 1)
def test_two_dimensional_data(self):
num_points = 1000
num_dims = 2
d1 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [-2, 1]
d2 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [2, -1]
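    # Each class is a Gaussian stretched by diag(2, 0.5) and shifted to an
    # opposite centre, so LDA should separate the two along a single axis.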
# Plot the original data.
plt.clf()
plt.subplot(2, 1, 1)
plt.plot(d1[:, 0], d1[:, 1], 'rx')
plt.plot(d2[:, 0], d2[:, 1], 'bo')
plt.title('Original Data')
x = np.concatenate((d1, d2), axis=0)
y = np.concatenate((np.ones(d1.shape[0])*42,
np.ones(d2.shape[0])*-12))
lda = scaled_lda.LinearDiscriminantAnalysis()
with self.assertRaisesRegex(
ValueError, 'Must fit the model before transforming.'):
lda.transform(d1)
with self.assertRaisesRegex(
ValueError, 'Must fit the model before transforming.'):
lda.explained_variance_ratio()
x_lda = lda.fit_transform(x, y)
labels = lda.labels
self.assertLen(labels, 2)
# Plot the transformed data.
plt.subplot(2, 1, 2)
plt.plot(x_lda[y == labels[0], 0], x_lda[y == labels[0], 1], 'rx')
plt.plot(x_lda[y == labels[1], 0], x_lda[y == labels[1], 1], 'bo')
plt.title('Transfomed Data')
# Make sure the transformed centers are symmetric on the first (x) axis.
mean_vectors = [np.reshape(v, (1, -1)) for v in lda.mean_vectors]
centers = lda.transform(np.concatenate(mean_vectors, axis=0))
print('Transformed centers are:', centers)
self.assertAlmostEqual(centers[0, 0], -centers[1, 0], delta=0.1)
np.testing.assert_allclose(centers[:, 1], [0., 0.], atol=0.1)
plt.savefig(os.path.join(os.environ.get('TMPDIR') or '/tmp',
'scaled_lda.png'))
with self.assertRaisesRegex(
TypeError, 'Inconsistent training and transform sizes'):
lda.transform(d1[:, 0:1])
# Now test model from saved parameters
nlda = scaled_lda.LinearDiscriminantAnalysis()
nlda.model_parameters = lda.model_parameters # Get/set parameters test
centers = nlda.transform(np.concatenate(mean_vectors, axis=0))
self.assertAlmostEqual(centers[0, 0], -centers[1, 0], delta=0.1)
np.testing.assert_allclose(centers[:, 1], [0., 0.], atol=0.1)
def test_fitted_data(self):
"""Makes sure we can generate a fitted model with .from_fitted_data.
"""
num_points = 1000
num_dims = 2
d1 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [-2, 1]
d2 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [2, -1]
x = np.concatenate((d1, d2), axis=0)
y = np.concatenate((np.ones(d1.shape[0])*42,
np.ones(d2.shape[0])*-12))
lda = scaled_lda.LinearDiscriminantAnalysis.from_fitted_data(x, y)
explained = lda.explained_variance_ratio()
np.testing.assert_allclose(explained, [1., 0.], atol=1e-8)
def test_three_class_data(self):
num_points = 1000
num_dims = 2
d1 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [-2, 1]
d2 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]]) + [2, -1]
d3 = np.matmul(np.random.randn(num_points, num_dims),
[[2, 0], [0, 0.5]])
x = np.concatenate((d1, d2, d3), axis=0)
y = np.concatenate((np.ones(d1.shape[0])*42,
np.ones(d2.shape[0])*-12,
np.ones(d3.shape[0])))
lda = scaled_lda.LinearDiscriminantAnalysis()
x_lda = lda.fit_transform(x, y)
self.assertEqual(x_lda.shape[0], 3*num_points)
self.assertEqual(x_lda.shape[1], 2) # Only two dimensional data.
labels = lda.labels
self.assertLen(labels, 3)
def test_four_dimensional_data(self):
num_points = 1000
num_dims = 4
center = np.array([-2, 1, 3, 2]) # Arbitrary
m1 = np.random.randn(num_points, num_dims) + center
m2 = np.random.randn(num_points, num_dims) + -center
x = np.concatenate((m1, m2), axis=0)
y = np.concatenate((np.ones(m1.shape[0])*0,
np.ones(m2.shape[0])*1.0))
slda = scaled_lda.ScaledLinearDiscriminantAnalysis()
slda.fit_two_classes(m1, m2)
m_lda = slda.transform(x)
self.assertEqual(m_lda.shape, (2*num_points, 2))
self.assertEqual(slda.coef_array.shape[0], num_dims)
self.assertLen(slda.labels, slda.coef_array.shape[1])
mean_vectors = [np.reshape(v, (1, -1)) for v in slda.mean_vectors]
centers = slda.transform(np.concatenate(mean_vectors, axis=0))[:, 0]
np.testing.assert_allclose(centers, [0., 1.0], atol=1e-8)
explained = slda.explained_variance_ratio()
np.testing.assert_allclose(explained, [1., 0., 0., 0.], atol=1e-8)
# Now test save and restoring parameters.
param_dict = slda.model_parameters
nlda = scaled_lda.ScaledLinearDiscriminantAnalysis()
nlda.model_parameters = param_dict
mean_vectors = [np.reshape(v, (1, -1)) for v in nlda.mean_vectors]
centers = nlda.transform(np.concatenate(mean_vectors, axis=0))[:, 0]
np.testing.assert_allclose(centers, [0., 1.0], atol=1e-8)
# Make sure we fail with more than two classes.
with self.assertRaisesRegex(
ValueError, 'Scaled LDA can only be done on two-class data.'):
y[0:2] = 42
slda.fit_transform(x, y)
if __name__ == '__main__':
absltest.main()
| apache-2.0 | -3,103,978,920,472,958,500 | 34.698413 | 80 | 0.615977 | false |
mrbandrews/bitcoin | qa/rpc-tests/receivedby.py | 8 | 7385 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listreceivedbyaddress API
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
def get_sub_array_from_array(object_array, to_match):
'''
Finds and returns a sub array from an array of arrays.
to_match should be a unique idetifier of a sub array
'''
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
return item
return []
def check_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found in object_array
"""
if should_not_find == True:
expected = { }
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects was matched %s"%(str(to_match)))
class ReceivedByTest(BitcoinTestFramework):
def run_test(self):
'''
listreceivedbyaddress Test
'''
# Send from node 0 to 1
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 0.1)
self.sync_all()
#Check not listed in listreceivedbyaddress because has 0 confirmations
check_array_result(self.nodes[1].listreceivedbyaddress(),
{"address":addr},
{ },
True)
#Bury Tx under 10 block so it will be returned by listreceivedbyaddress
self.nodes[1].setgenerate(True, 10)
self.sync_all()
check_array_result(self.nodes[1].listreceivedbyaddress(),
{"address":addr},
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
#With min confidence < 10
check_array_result(self.nodes[1].listreceivedbyaddress(5),
{"address":addr},
{"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
#With min confidence > 10, should not find Tx
check_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True)
#Empty Tx
addr = self.nodes[1].getnewaddress()
check_array_result(self.nodes[1].listreceivedbyaddress(0,True),
{"address":addr},
{"address":addr, "account":"", "amount":0, "confirmations":0, "txids":[]})
'''
getreceivedbyaddress Test
'''
# Send from node 0 to 1
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 0.1)
self.sync_all()
#Check balance is 0 because of 0 confirmations
balance = self.nodes[1].getreceivedbyaddress(addr)
if balance != Decimal("0.0"):
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
#Check balance is 0.1
balance = self.nodes[1].getreceivedbyaddress(addr,0)
if balance != Decimal("0.1"):
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
#Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress
self.nodes[1].setgenerate(True, 10)
self.sync_all()
balance = self.nodes[1].getreceivedbyaddress(addr)
if balance != Decimal("0.1"):
raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
'''
listreceivedbyaccount + getreceivedbyaccount Test
'''
#set pre-state
addrArr = self.nodes[1].getnewaddress()
account = self.nodes[1].getaccount(addrArr)
received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(),{"account":account})
if len(received_by_account_json) == 0:
raise AssertionError("No accounts found in node")
        balance_by_account = self.nodes[1].getreceivedbyaccount(account)
txid = self.nodes[0].sendtoaddress(addr, 0.1)
# listreceivedbyaccount should return received_by_account_json because of 0 confirmations
check_array_result(self.nodes[1].listreceivedbyaccount(),
{"account":account},
received_by_account_json)
# getreceivedbyaddress should return same balance because of 0 confirmations
balance = self.nodes[1].getreceivedbyaccount(account)
if balance != balance_by_account:
raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
self.nodes[1].setgenerate(True, 10)
self.sync_all()
# listreceivedbyaccount should return updated account balance
check_array_result(self.nodes[1].listreceivedbyaccount(),
{"account":account},
{"account":received_by_account_json["account"], "amount":(received_by_account_json["amount"] + Decimal("0.1"))})
# getreceivedbyaddress should return updates balance
balance = self.nodes[1].getreceivedbyaccount(account)
if balance != balance_by_account + Decimal("0.1"):
raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
#Create a new account named "mynewaccount" that has a 0 balance
self.nodes[1].getaccountaddress("mynewaccount")
received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(0,True),{"account":"mynewaccount"})
if len(received_by_account_json) == 0:
raise AssertionError("No accounts found in node")
# Test includeempty of listreceivedbyaccount
if received_by_account_json["amount"] != Decimal("0.0"):
raise AssertionError("Wrong balance returned by listreceivedbyaccount, %0.2f"%(received_by_account_json["amount"]))
# Test getreceivedbyaccount for 0 amount accounts
balance = self.nodes[1].getreceivedbyaccount("mynewaccount")
if balance != Decimal("0.0"):
raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
if __name__ == '__main__':
ReceivedByTest().main()
| mit | 8,158,783,455,221,258,000 | 43.487952 | 139 | 0.616655 | false |
ATIX-AG/ansible | lib/ansible/modules/network/interface/net_interface.py | 58 | 3658 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: net_interface
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage Interface on network devices
description:
- This module provides declarative management of Interfaces
on network devices.
options:
name:
description:
- Name of the Interface.
required: true
description:
description:
- Description of Interface.
enabled:
description:
- Configure interface link status.
speed:
description:
- Interface link speed.
mtu:
description:
- Maximum size of transmit packet.
duplex:
description:
- Interface link status
default: auto
choices: ['full', 'half', 'auto']
tx_rate:
description:
- Transmit rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
rx_rate:
description:
- Receiver rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
device. This wait is applicable for operational state argument which are
I(state) with values C(up)/C(down), I(tx_rate) and I(rx_rate).
default: 10
aggregate:
description: List of Interfaces definitions.
purge:
description:
- Purge Interfaces not defined in the aggregate parameter.
This applies only for logical interface.
default: no
state:
description:
- State of the Interface configuration, C(up) indicates present and
operationally up and C(down) indicates present and operationally C(down)
default: present
choices: ['present', 'absent', 'up', 'down']
"""
EXAMPLES = """
- name: configure interface
net_interface:
name: ge-0/0/1
description: test-interface
- name: remove interface
net_interface:
name: ge-0/0/1
state: absent
- name: make interface up
net_interface:
name: ge-0/0/1
description: test-interface
enabled: True
- name: make interface down
net_interface:
name: ge-0/0/1
description: test-interface
enabled: False
- name: Create interface using aggregate
net_interface:
aggregate:
- { name: ge-0/0/1, description: test-interface-1 }
- { name: ge-0/0/2, description: test-interface-2 }
speed: 1g
duplex: full
mtu: 512
- name: Delete interface using aggregate
  net_interface:
aggregate:
- { name: ge-0/0/1 }
- { name: ge-0/0/2 }
state: absent
- name: Check intent arguments
net_interface:
name: fxp0
state: up
tx_rate: ge(0)
rx_rate: le(0)
- name: Config + intent
net_interface:
name: fxp0
enabled: False
state: down
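# Illustrative addition (not from the upstream docs): purging logical
# interfaces absent from the aggregate; assumes the target platform
# supports purge for logical interfaces, per the purge option above.
- name: Purge interfaces not defined in the aggregate
  net_interface:
    aggregate:
      - { name: ge-0/0/1 }
    purge: yes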
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- interface 20
- name test-interface
"""
| gpl-3.0 | 3,942,301,023,897,405,000 | 25.316547 | 135 | 0.663477 | false |
palmerjh/iEBE | PlayGround/job-2/iSS/for_paraview/lib152/DataSetAttr.py | 9 | 1758 | #!/usr/bin/env python
"""
Copyright 2001 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the LGPL. See http://www.fsf.org
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Revision: 1.1 $
$Date: 2001-05-20 12:51:29 $
Pearu Peterson
"""
__version__ = "$Id: DataSetAttr.py,v 1.1 2001-05-20 12:51:29 pearu Exp $"
import common
import string
class DataSetAttr(common.Common):
"""Abstract class for VTK data."""
counters = {}
default_value = 0
def _get_default_name(self):
n = self.__class__.__name__
try:
self.counters[n] = self.counters[n] + 1
except KeyError:
self.counters[n] = 0
return self.__class__.__name__+str(self.counters[n])
def _get_name(self,name):
if name is None:
name = self._get_default_name()
self.warning('Using name=%s'%(`name`))
return name
if common.is_string(name):
name = string.replace(string.strip(name),' ','_')
#name = name.strip().replace(' ','_')
if name:
return name
raise ValueError,'name=%s must be non-empty string'%(`name`)
def _get_lookup_table(self,name):
if name is None:
name = 'default'
self.warning('Using lookup_table=%s'%(`name`))
return name
if common.is_string(name):
name = string.replace(string.strip(name),' ','_')
#name = name.strip().replace(' ','_')
if name:
return name
raise ValueError,'lookup_table=%s must be nonempty string'%(`name`)
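# Illustrative behaviour of the helpers above (added for clarity; not in the
# original file). For a hypothetical instance `attr` of a DataSetAttr subclass:
#
#     attr._get_name(None)        # -> auto-numbered, e.g. 'MySubclass0'
#     attr._get_name('my field')  # -> 'my_field' (spaces become underscores)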
if __name__ == "__main__":
pass
| gpl-3.0 | -4,663,415,652,427,356,000 | 30.963636 | 75 | 0.564278 | false |
PierreBizouard/pizco-utils | pizcoutils/helpers/BuildExeStartup.py | 1 | 2974 | #------------------------------------------------------------------------------
# BuildExeStartup.py
# Initialization script for cx_Freeze which manipulates the path so that the
# directory in which the executable is found is searched for extensions but
# no other directory is searched. It also sets the attribute sys.frozen so that
# the Win32 extensions behave as expected.
#------------------------------------------------------------------------------
import os
import sys
subpath = "bin"
# if trap for frozen script wrapping
base_path = os.path.join(os.path.dirname(sys.executable),subpath)
sys.path.insert(0,base_path+'\\library.zip')
sys.path.insert(0,base_path)
os.environ['MATPLOTLIBDATA'] = os.path.join(os.path.dirname(sys.executable),subpath+'\\mpl-data')
import zipimport
sys.frozen = True
sys.path = sys.path[:2]
#print "IPython can require the zip_imp utils // patching qt_loaders allows this"
#from zip_imp import patch
#patch()
#However, it does work, so we end up monkey patching qt loading
from helpers.LogUtils import *
if (os.path.isdir(DIR_NAME+"\\IPython") or os.path.isdir(base_path+"\\IPython")):
debug("monkey patching ipython")
os.environ["IPYTHONDIR"] = base_path
from IPython.external import qt_loaders
from IPython.external.qt_loaders import *
def new_load_qt(api_option):
loaders = {QT_API_PYSIDE: import_pyside,
QT_API_PYQT: import_pyqt4,
QT_API_PYQTv1: partial(import_pyqt4, version=1),
QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None)
}
api = loaded_api()
result = loaders[api]()
api = result[-1] # changed if api = QT_API_PYQT_DEFAULT
commit_api(loaded_api())
return result
qt_loaders.load_qt = new_load_qt
os.environ["TCL_LIBRARY"] = os.path.join(DIR_NAME, "tcl")
os.environ["TK_LIBRARY"] = os.path.join(DIR_NAME, "tk")
#Enforce sip vars version on loading
if (os.path.isfile(DIR_NAME+"\\QtGui4.dll") or os.path.isfile(base_path+"\\QtGui4.dll")):
debug("setting sip to v2")
#perform qt4 rthook like pyinstaller
import sip
sip.setapi(u'QDate', 2)
sip.setapi(u'QDateTime', 2)
sip.setapi(u'QString', 2)
sip.setapi(u'QTextStream', 2)
sip.setapi(u'QTime', 2)
sip.setapi(u'QUrl', 2)
sip.setapi(u'QVariant', 2)
m = __import__("__main__")
importer = zipimport.zipimporter(INITSCRIPT_ZIP_FILE_NAME)
if INITSCRIPT_ZIP_FILE_NAME != SHARED_ZIP_FILE_NAME:
moduleName = m.__name__
else:
name, ext = os.path.splitext(os.path.basename(os.path.normcase(FILE_NAME)))
moduleName = "%s__main__" % name
code = importer.get_code(moduleName)
exec(code, m.__dict__)
versionInfo = sys.version_info[:3]
if versionInfo >= (2, 5, 0) and versionInfo <= (2, 6, 4):
module = sys.modules.get("threading")
if module is not None:
module._shutdown()
| bsd-3-clause | 2,568,392,606,868,377,000 | 33.404762 | 97 | 0.616342 | false |
pajlada/pajbot | pajbot/apiwrappers/base.py | 1 | 3753 | import logging
from urllib.parse import quote, urlparse, urlunparse
import datetime
from requests import Session
from pajbot import constants
from pajbot.apiwrappers.response_cache import APIResponseCache
log = logging.getLogger(__name__)
class BaseAPI:
def __init__(self, base_url, redis=None):
self.base_url = base_url
self.session = Session()
self.timeout = 20
# e.g. pajbot1/1.35
self.session.headers["User-Agent"] = f"pajbot/{constants.VERSION}"
if redis is not None:
self.cache = APIResponseCache(redis)
@staticmethod
def quote_path_param(param):
return quote(param, safe="")
@staticmethod
def fill_in_url_scheme(url, default_scheme="https"):
"""Fill in the scheme part of a given URL string, e.g.
with given inputs of url = "//example.com/abc" and
default_scheme="https", the output would be
"https://example.com/abc"
If the given input URL already has a scheme, the scheme is not altered.
"""
parsed_template = urlparse(url, scheme=default_scheme)
return urlunparse(parsed_template)
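    # Doctest-style illustration of the behaviour described above (added for
    # clarity; not in the original source):
    #
    #     >>> BaseAPI.fill_in_url_scheme("//example.com/abc")
    #     'https://example.com/abc'
    #     >>> BaseAPI.fill_in_url_scheme("http://example.com/abc")
    #     'http://example.com/abc'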
@staticmethod
def parse_datetime(datetime_str):
"""Parses date strings in the format of 2015-09-11T23:01:11Z
to a tz-aware datetime object."""
naive_dt = datetime.datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%SZ")
return naive_dt.replace(tzinfo=datetime.timezone.utc)
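    # Illustrative example (added for clarity; not in the original source):
    #
    #     >>> BaseAPI.parse_datetime("2015-09-11T23:01:11Z")
    #     datetime.datetime(2015, 9, 11, 23, 1, 11, tzinfo=datetime.timezone.utc)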
@staticmethod
def join_base_and_list(base, path_segments):
url = base
for path_segment in path_segments:
# str(endpoint) so numbers can be used as path segments too
url = BaseAPI.join_base_and_string(url, BaseAPI.quote_path_param(str(path_segment)))
return url
@staticmethod
def join_base_and_string(base, endpoint):
base = base.rstrip("/")
endpoint = endpoint.lstrip("/")
return base + "/" + endpoint
@staticmethod
def join_base_and_endpoint(base, endpoint):
# For use cases with no base and absolute endpoint URLs
if base is None:
return endpoint
if isinstance(endpoint, list):
return BaseAPI.join_base_and_list(base, endpoint)
else:
return BaseAPI.join_base_and_string(base, endpoint)
def request(self, method, endpoint, params, headers, json=None, **request_options):
full_url = self.join_base_and_endpoint(self.base_url, endpoint)
response = self.session.request(
method, full_url, params=params, headers=headers, json=json, timeout=self.timeout, **request_options
)
response.raise_for_status()
return response
def get(self, endpoint, params=None, headers=None, **request_options):
return self.request("GET", endpoint, params, headers, **request_options).json()
def get_response(self, endpoint, params=None, headers=None, **request_options):
return self.request("GET", endpoint, params, headers, **request_options)
def get_binary(self, endpoint, params=None, headers=None, **request_options):
return self.request("GET", endpoint, params, headers, **request_options).content
def post(self, endpoint, params=None, headers=None, json=None, **request_options):
return self.request("POST", endpoint, params, headers, json, **request_options).json()
def put(self, endpoint, params=None, headers=None, json=None, **request_options):
return self.request("PUT", endpoint, params, headers, json, **request_options).json()
def patch(self, endpoint, params=None, headers=None, json=None, **request_options):
return self.request("PATCH", endpoint, params, headers, json, **request_options)
| mit | -5,506,639,588,589,977,000 | 36.53 | 112 | 0.656009 | false |
pcabido/socorro | alembic/versions/235c80dc2e12_fixes_bug_1047079_remove_processors_.py | 13 | 2071 | """Fixes bug 1047079 - remove processors, jobs tables
Revision ID: 235c80dc2e12
Revises: 556e11f2d00f
Create Date: 2014-12-30 13:29:15.108296
"""
# revision identifiers, used by Alembic.
revision = '235c80dc2e12'
down_revision = '556e11f2d00f'
from alembic import op
from socorro.lib import citexttype, jsontype, buildtype
from socorro.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
op.drop_table('jobs')
op.drop_table('processors')
op.alter_column('server_status', 'processors_count', nullable=True)
def downgrade():
op.alter_column('server_status', 'processors_count', nullable=False)
op.execute("""
CREATE TABLE processors (
id serial NOT NULL PRIMARY KEY,
name varchar(255) NOT NULL UNIQUE,
startdatetime timestamp with time zone NOT NULL,
lastseendatetime timestamp with time zone
)
""")
op.execute("""
CREATE TABLE jobs (
id serial NOT NULL PRIMARY KEY,
pathname character varying(1024) NOT NULL,
uuid varchar(50) NOT NULL UNIQUE,
owner integer,
priority integer DEFAULT 0,
queueddatetime timestamp with time zone,
starteddatetime timestamp with time zone,
completeddatetime timestamp with time zone,
success boolean,
message text,
FOREIGN KEY (owner) REFERENCES processors (id)
)
""")
op.execute("""
CREATE INDEX jobs_owner_key ON jobs (owner)
""")
op.execute("""
CREATE INDEX jobs_owner_starteddatetime_key ON jobs (owner, starteddatetime)
""")
op.execute("""
CREATE INDEX jobs_owner_starteddatetime_priority_key ON jobs (owner, starteddatetime, priority DESC)
""")
op.execute("""
CREATE INDEX jobs_completeddatetime_queueddatetime_key ON jobs (completeddatetime, queueddatetime)
""")
| mpl-2.0 | 4,087,245,662,636,866,600 | 30.861538 | 108 | 0.662482 | false |
GaetanCambier/CouchPotatoServer | libs/requests/api.py | 361 | 4344 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the request in seconds.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
"""
session = sessions.Session()
return session.request(method=method, url=url, **kwargs)
def get(url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)
def options(url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('post', url, data=data, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('delete', url, **kwargs)
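# Consolidated usage sketch (added for clarity; it mirrors the docstrings
# above and is not part of the original file):
#
#     import requests
#     r = requests.get('http://httpbin.org/get', params={'q': 'test'})
#     r = requests.post('http://httpbin.org/post', data={'key': 'value'})
#     r.raise_for_status()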
| gpl-3.0 | -4,565,407,815,520,076,300 | 35.2 | 136 | 0.668278 | false |
ASCIT/donut | donut/modules/feedback/helpers.py | 2 | 9848 | from donut import email_utils
from donut.modules.feedback import email_templates
import flask
import pymysql.cursors
from donut.modules.feedback.groups import groupInt, groupName
import donut.modules.groups.helpers as groups
import donut.modules.newsgroups.helpers as newsgroups
def send_update_email(group, email, complaint_id):
'''
Sends an email to [email] of poster and group
'''
msg = email_templates.added_message.format(group,
get_link(group, complaint_id))
subject = "Received {} Feedback".format(group)
try:
email_utils.send_email(email, msg, subject, group=group)
return True
except:
return False
def register_complaint(group, data, notification=True):
'''
Inputs a complaint into the database and returns the complaint id
associated with this complaint
    data should be a dict with keys 'subject', 'msg' and optionally 'name', 'email';
    if required fields are missing, returns False
'''
if not (data and data['subject'] and data['msg']): return False
# Register complaint
query = """
INSERT INTO complaint_info (org, subject, resolved, ombuds, uuid)
VALUES (%s, %s, FALSE, %s, UNHEX(REPLACE(UUID(), '-', '')))
"""
if 'ombuds' not in data:
data['ombuds'] = 0
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (groupInt[group], data['subject'],
data['ombuds']))
complaint_id = cursor.lastrowid
# Add email to db if applicable
if data['email']:
for email in data['email'].split(','):
add_email(groupInt[group], complaint_id, email.strip(), False)
# Add message to database
add_msg(group, complaint_id, data['msg'], data['name'], notification)
return complaint_id
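# Illustrative usage (added for clarity; not part of the original module).
# Assumes an active Flask application context with flask.g.pymysql_db bound,
# as every helper here requires; the group and addresses below are made up:
#
#     data = {'subject': 'Slow grading', 'msg': 'Problem sets take weeks',
#             'name': 'A student', 'email': 'student@example.com'}
#     complaint_id = register_complaint('arc', data, notification=False)
#     add_email('arc', complaint_id, 'advisor@example.com', notification=False)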
def send_to_group(group, data, complaint_id=None):
group_id = groups.get_group_id(groupName[group])
data['group'] = group_id
data['group_name'] = group
data['poster'] = "{} Feedback".format(group)
data['plain'] = data['msg']
if complaint_id:
data['plain'] += "\nLink to the issue: {}".format(
get_link(group, complaint_id))
data['msg'] = None
newsgroups.send_email(data)
def add_email(group, complaint_id, email, notification=True):
'''
    Adds an email to the list of addresses subscribed to this complaint;
    returns False if complaint_id is invalid
'''
if not get_subject(group, complaint_id): return False
query = """
INSERT INTO complaint_emails (complaint_id, email)
VALUES (%s, %s)
"""
with flask.g.pymysql_db.cursor() as cursor:
try:
cursor.execute(query, (complaint_id, email))
except pymysql.err.IntegrityError:
return False
if notification:
send_update_email(group, email, complaint_id)
return True
def remove_email(group, complaint_id, email):
'''
Removes 'email' from the list of emails subscribed to this complaint
returns False if complaint_id is invalid
'''
if not get_subject(group, complaint_id): return False
query = 'DELETE FROM complaint_emails WHERE complaint_id = %s AND email = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (complaint_id, email))
return True
def add_msg(group, complaint_id, message, poster, notification=True):
'''
Adds a message to a complaint in the database
and updates status of complaint to 'new_msg'
if poster is None or an empty string, it will be replaced with
"(anonymous)"
If complaint_id is invalid, returns False
'''
subject = get_subject(group, complaint_id)
if not subject:
return False
# Add the message
query = """
INSERT INTO complaint_messages (complaint_id, message, poster, time)
VALUES (%s, %s, %s, NOW())
"""
# Update the status to new_msg
query2 = 'UPDATE complaint_info SET resolved = FALSE WHERE complaint_id = %s'
if not poster:
poster = '(anonymous)'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (complaint_id, message, poster))
cursor.execute(query2, complaint_id)
if notification:
data = {'msg': message, 'subject': subject}
send_to_group(group, data, complaint_id)
query = 'SELECT email FROM complaint_emails WHERE complaint_id = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, complaint_id)
res = cursor.fetchall()
for row in res:
send_update_email(group, row['email'], complaint_id)
def get_link(group, complaint_id):
'''
Gets a (fully qualified) link to the view page for this complaint id
'''
query = 'SELECT HEX(uuid) AS uuid FROM complaint_info WHERE complaint_id = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, complaint_id)
res = cursor.fetchone()
if not res:
return None
uuid = res['uuid']
return flask.url_for(
'feedback.feedback_view_complaint',
group=group,
id=uuid,
_external=True)
def get_id(group, uuid):
'''
Returns the complaint_id associated with a uuid
or false if the uuid is not found
'''
query = 'SELECT complaint_id FROM complaint_info WHERE org = %s AND uuid = UNHEX(%s)'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (groupInt[group], uuid))
if not cursor.rowcount:
return False
return cursor.fetchone()['complaint_id']
def get_messages(group, complaint_id):
'''
Returns timestamps, posters, messages, and message_id's on this complaint
in ascending order of timestamp
'''
query = """
SELECT time, poster, message, message_id FROM complaint_messages
WHERE complaint_id = %s ORDER BY time
""".format(group)
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (complaint_id))
return cursor.fetchall()
def get_summary(group, complaint_id):
'''
Returns a dict with the following fields: subject, status
'''
query = 'SELECT subject, resolved FROM complaint_info WHERE complaint_id = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, complaint_id)
return cursor.fetchone()
def get_subject(group, complaint_id):
'''
    Returns the subject or None if complaint_id is invalid
'''
res = get_summary(group, complaint_id)
return res['subject'] if res else None
def get_status(group, complaint_id):
'''
Returns the status of a post or None if complaint_id is invalid
'''
res = get_summary(group, complaint_id)
return res['resolved'] if res else None
def set_resolved(group, complaint_id, status):
'''
Sets the status of this complaint to resolved/unresolved
'''
query = "UPDATE complaint_info SET resolved=%s WHERE complaint_id = %s"
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, (status, complaint_id))
def get_emails(group, complaint_id):
'''
Returns a list of subscribed emails for this complaint (which may be empty)
or an empty list if complaint_id is invalid
'''
query = 'SELECT email FROM complaint_emails WHERE complaint_id = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, complaint_id)
res = cursor.fetchall()
return [row['email'] for row in res]
def get_ombuds(complaint_id):
'''
Returns whether the person has already talked to an ombuds/TA/instructor about
their problem.
'''
query = 'SELECT ombuds FROM complaint_info WHERE complaint_id = %s'
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, complaint_id)
return cursor.fetchone()['ombuds']
def set_ombuds(complaint_id, ombuds):
'''
Sets the status of whether the user has spoken to an ombuds/TA/instructor.
'''
query = "UPDATE complaint_info SET ombuds = %s WHERE complaint_id = %s"
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, [ombuds, complaint_id])
def get_all_fields(group, complaint_id):
'''
Returns a dict with emails, messages, subject, status
Returns None if complaint_id is invalid
'''
if not get_subject(group, complaint_id):
return None
data = {
'emails': get_emails(group, complaint_id),
'messages': get_messages(group, complaint_id),
'subject': get_subject(group, complaint_id),
'resolved': get_status(group, complaint_id)
}
if group == 'arc':
data['ombuds'] = get_ombuds(complaint_id)
return data
def get_posts(group, view_unresolved):
'''
    Returns posts and their associated messages.
    If view_unresolved is true, only returns unresolved posts.
Will be an array of dicts with keys complaint_id, subject,
resolved, uuid, message, poster, time
Note that message and poster refer to the latest comment on this complaint
'''
query = """SELECT post.complaint_id AS complaint_id, post.subject AS subject,
post.resolved AS resolved, post.uuid AS uuid, comment.message AS message,
comment.poster AS poster, comment.time AS time
FROM complaint_info post
NATURAL JOIN complaint_messages comment
INNER JOIN (
SELECT complaint_id, max(time) AS time
FROM complaint_messages
GROUP BY complaint_id
) maxtime
ON maxtime.time = comment.time AND maxtime.complaint_id = comment.complaint_id
WHERE post.org = %s
"""
if view_unresolved:
query += " AND post.resolved = FALSE"
query += " ORDER BY comment.time DESC"
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, groupInt[group])
return cursor.fetchall()
| mit | -4,923,602,064,571,854,000 | 33.194444 | 89 | 0.648355 | false |
marc-sensenich/ansible | test/units/plugins/httpapi/test_ftd.py | 22 | 14894 | # Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
from ansible.module_utils.six.moves.urllib.error import HTTPError
from units.compat import mock
from units.compat import unittest
from units.compat.builtins import BUILTINS
from units.compat.mock import mock_open, patch
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.network.ftd.common import HTTPMethod, ResponseParams
from ansible.module_utils.network.ftd.fdm_swagger_client import SpecProp, FdmSwaggerParser
from ansible.module_utils.six import BytesIO, StringIO
from ansible.plugins.httpapi.ftd import HttpApi
EXPECTED_BASE_HEADERS = {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
class FakeFtdHttpApiPlugin(HttpApi):
def __init__(self, conn):
super(FakeFtdHttpApiPlugin, self).__init__(conn)
self.hostvars = {
'token_path': '/testLoginUrl',
'spec_path': '/testSpecUrl'
}
def get_option(self, var):
return self.hostvars[var]
class TestFtdHttpApi(unittest.TestCase):
def setUp(self):
self.connection_mock = mock.Mock()
self.ftd_plugin = FakeFtdHttpApiPlugin(self.connection_mock)
self.ftd_plugin.access_token = 'ACCESS_TOKEN'
self.ftd_plugin._load_name = 'httpapi'
def test_login_should_request_tokens_when_no_refresh_token(self):
self.connection_mock.send.return_value = self._connection_response(
{'access_token': 'ACCESS_TOKEN', 'refresh_token': 'REFRESH_TOKEN'}
)
self.ftd_plugin.login('foo', 'bar')
assert 'ACCESS_TOKEN' == self.ftd_plugin.access_token
assert 'REFRESH_TOKEN' == self.ftd_plugin.refresh_token
assert {'Authorization': 'Bearer ACCESS_TOKEN'} == self.ftd_plugin.connection._auth
expected_body = json.dumps({'grant_type': 'password', 'username': 'foo', 'password': 'bar'})
self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)
def test_login_should_update_tokens_when_refresh_token_exists(self):
self.ftd_plugin.refresh_token = 'REFRESH_TOKEN'
self.connection_mock.send.return_value = self._connection_response(
{'access_token': 'NEW_ACCESS_TOKEN', 'refresh_token': 'NEW_REFRESH_TOKEN'}
)
self.ftd_plugin.login('foo', 'bar')
assert 'NEW_ACCESS_TOKEN' == self.ftd_plugin.access_token
assert 'NEW_REFRESH_TOKEN' == self.ftd_plugin.refresh_token
assert {'Authorization': 'Bearer NEW_ACCESS_TOKEN'} == self.ftd_plugin.connection._auth
expected_body = json.dumps({'grant_type': 'refresh_token', 'refresh_token': 'REFRESH_TOKEN'})
self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)
def test_login_should_use_host_variable_when_set(self):
temp_token_path = self.ftd_plugin.hostvars['token_path']
self.ftd_plugin.hostvars['token_path'] = '/testFakeLoginUrl'
self.connection_mock.send.return_value = self._connection_response(
{'access_token': 'ACCESS_TOKEN', 'refresh_token': 'REFRESH_TOKEN'}
)
self.ftd_plugin.login('foo', 'bar')
self.connection_mock.send.assert_called_once_with('/testFakeLoginUrl', mock.ANY, headers=mock.ANY,
method=mock.ANY)
self.ftd_plugin.hostvars['token_path'] = temp_token_path
def test_login_raises_exception_when_no_refresh_token_and_no_credentials(self):
with self.assertRaises(AnsibleConnectionFailure) as res:
self.ftd_plugin.login(None, None)
assert 'Username and password are required' in str(res.exception)
def test_login_raises_exception_when_invalid_response(self):
self.connection_mock.send.return_value = self._connection_response(
{'no_access_token': 'ACCESS_TOKEN'}
)
with self.assertRaises(ConnectionError) as res:
self.ftd_plugin.login('foo', 'bar')
assert 'Server returned response without token info during connection authentication' in str(res.exception)
def test_login_raises_exception_when_http_error(self):
self.connection_mock.send.side_effect = HTTPError('http://testhost.com', 400, '', {},
StringIO('{"message": "Failed to authenticate user"}'))
with self.assertRaises(ConnectionError) as res:
self.ftd_plugin.login('foo', 'bar')
assert 'Failed to authenticate user' in str(res.exception)
def test_logout_should_revoke_tokens(self):
self.ftd_plugin.access_token = 'ACCESS_TOKEN_TO_REVOKE'
self.ftd_plugin.refresh_token = 'REFRESH_TOKEN_TO_REVOKE'
self.connection_mock.send.return_value = self._connection_response(None)
self.ftd_plugin.logout()
assert self.ftd_plugin.access_token is None
assert self.ftd_plugin.refresh_token is None
expected_body = json.dumps({'grant_type': 'revoke_token', 'access_token': 'ACCESS_TOKEN_TO_REVOKE',
'token_to_revoke': 'REFRESH_TOKEN_TO_REVOKE'})
self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)
def test_send_request_should_send_correct_request(self):
exp_resp = {'id': '123', 'name': 'foo'}
self.connection_mock.send.return_value = self._connection_response(exp_resp)
resp = self.ftd_plugin.send_request('/test/{objId}', HTTPMethod.PUT,
body_params={'name': 'foo'},
path_params={'objId': '123'},
query_params={'at': 0})
assert {ResponseParams.SUCCESS: True, ResponseParams.STATUS_CODE: 200,
ResponseParams.RESPONSE: exp_resp} == resp
self.connection_mock.send.assert_called_once_with('/test/123?at=0', '{"name": "foo"}', method=HTTPMethod.PUT,
headers=EXPECTED_BASE_HEADERS)
def test_send_request_should_return_empty_dict_when_no_response_data(self):
self.connection_mock.send.return_value = self._connection_response(None)
resp = self.ftd_plugin.send_request('/test', HTTPMethod.GET)
assert {ResponseParams.SUCCESS: True, ResponseParams.STATUS_CODE: 200, ResponseParams.RESPONSE: {}} == resp
self.connection_mock.send.assert_called_once_with('/test', None, method=HTTPMethod.GET,
headers=EXPECTED_BASE_HEADERS)
def test_send_request_should_return_error_info_when_http_error_raises(self):
self.connection_mock.send.side_effect = HTTPError('http://testhost.com', 500, '', {},
StringIO('{"errorMessage": "ERROR"}'))
resp = self.ftd_plugin.send_request('/test', HTTPMethod.GET)
assert {ResponseParams.SUCCESS: False, ResponseParams.STATUS_CODE: 500,
ResponseParams.RESPONSE: {'errorMessage': 'ERROR'}} == resp
def test_send_request_raises_exception_when_invalid_response(self):
self.connection_mock.send.return_value = self._connection_response('nonValidJson')
with self.assertRaises(ConnectionError) as res:
self.ftd_plugin.send_request('/test', HTTPMethod.GET)
assert 'Invalid JSON response' in str(res.exception)
def test_handle_httperror_should_update_tokens_and_retry_on_auth_errors(self):
self.ftd_plugin.refresh_token = 'REFRESH_TOKEN'
self.connection_mock.send.return_value = self._connection_response(
{'access_token': 'NEW_ACCESS_TOKEN', 'refresh_token': 'NEW_REFRESH_TOKEN'}
)
retry = self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 401, '', {}, None))
assert retry
assert 'NEW_ACCESS_TOKEN' == self.ftd_plugin.access_token
assert 'NEW_REFRESH_TOKEN' == self.ftd_plugin.refresh_token
def test_handle_httperror_should_not_retry_on_non_auth_errors(self):
assert not self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 500, '', {}, None))
def test_handle_httperror_should_not_retry_when_ignoring_http_errors(self):
self.ftd_plugin._ignore_http_errors = True
assert not self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 401, '', {}, None))
@patch('os.path.isdir', mock.Mock(return_value=False))
def test_download_file(self):
self.connection_mock.send.return_value = self._connection_response('File content')
open_mock = mock_open()
with patch('%s.open' % BUILTINS, open_mock):
self.ftd_plugin.download_file('/files/1', '/tmp/test.txt')
open_mock.assert_called_once_with('/tmp/test.txt', 'wb')
open_mock().write.assert_called_once_with(b'File content')
@patch('os.path.isdir', mock.Mock(return_value=True))
def test_download_file_should_extract_filename_from_headers(self):
filename = 'test_file.txt'
response = mock.Mock()
response.info.return_value = {'Content-Disposition': 'attachment; filename="%s"' % filename}
dummy, response_data = self._connection_response('File content')
self.connection_mock.send.return_value = response, response_data
open_mock = mock_open()
with patch('%s.open' % BUILTINS, open_mock):
self.ftd_plugin.download_file('/files/1', '/tmp/')
open_mock.assert_called_once_with('/tmp/%s' % filename, 'wb')
open_mock().write.assert_called_once_with(b'File content')
@patch('os.path.basename', mock.Mock(return_value='test.txt'))
@patch('ansible.plugins.httpapi.ftd.encode_multipart_formdata',
mock.Mock(return_value=('--Encoded data--', 'multipart/form-data')))
def test_upload_file(self):
self.connection_mock.send.return_value = self._connection_response({'id': '123'})
open_mock = mock_open()
with patch('%s.open' % BUILTINS, open_mock):
resp = self.ftd_plugin.upload_file('/tmp/test.txt', '/files')
assert {'id': '123'} == resp
exp_headers = dict(EXPECTED_BASE_HEADERS)
exp_headers['Content-Length'] = len('--Encoded data--')
exp_headers['Content-Type'] = 'multipart/form-data'
self.connection_mock.send.assert_called_once_with('/files', data='--Encoded data--',
headers=exp_headers, method=HTTPMethod.POST)
open_mock.assert_called_once_with('/tmp/test.txt', 'rb')
@patch('os.path.basename', mock.Mock(return_value='test.txt'))
@patch('ansible.plugins.httpapi.ftd.encode_multipart_formdata',
mock.Mock(return_value=('--Encoded data--', 'multipart/form-data')))
def test_upload_file_raises_exception_when_invalid_response(self):
self.connection_mock.send.return_value = self._connection_response('invalidJsonResponse')
open_mock = mock_open()
with patch('%s.open' % BUILTINS, open_mock):
with self.assertRaises(ConnectionError) as res:
self.ftd_plugin.upload_file('/tmp/test.txt', '/files')
assert 'Invalid JSON response' in str(res.exception)
@patch.object(FdmSwaggerParser, 'parse_spec')
def test_get_operation_spec(self, parse_spec_mock):
self.connection_mock.send.return_value = self._connection_response(None)
parse_spec_mock.return_value = {
SpecProp.OPERATIONS: {'testOp': 'Specification for testOp'}
}
assert 'Specification for testOp' == self.ftd_plugin.get_operation_spec('testOp')
assert self.ftd_plugin.get_operation_spec('nonExistingTestOp') is None
@patch.object(FdmSwaggerParser, 'parse_spec')
def test_get_model_spec(self, parse_spec_mock):
self.connection_mock.send.return_value = self._connection_response(None)
parse_spec_mock.return_value = {
SpecProp.MODELS: {'TestModel': 'Specification for TestModel'}
}
assert 'Specification for TestModel' == self.ftd_plugin.get_model_spec('TestModel')
assert self.ftd_plugin.get_model_spec('NonExistingTestModel') is None
@patch.object(FdmSwaggerParser, 'parse_spec')
    def test_get_operation_specs_by_model_name(self, parse_spec_mock):
self.connection_mock.send.return_value = self._connection_response(None)
operation1 = {'modelName': 'TestModel'}
op_model_name_is_none = {'modelName': None}
op_without_model_name = {'url': 'testUrl'}
parse_spec_mock.return_value = {
SpecProp.MODEL_OPERATIONS: {
'TestModel': {
'testOp1': operation1,
'testOp2': 'spec2'
},
'TestModel2': {
'testOp10': 'spec10',
'testOp20': 'spec20'
}
},
SpecProp.OPERATIONS: {
'testOp1': operation1,
'testOp10': {
'modelName': 'TestModel2'
},
'testOpWithoutModelName': op_without_model_name,
'testOpModelNameIsNone': op_model_name_is_none
}
}
assert {'testOp1': operation1, 'testOp2': 'spec2'} == self.ftd_plugin.get_operation_specs_by_model_name(
'TestModel')
assert None is self.ftd_plugin.get_operation_specs_by_model_name(
'testOpModelNameIsNone')
assert None is self.ftd_plugin.get_operation_specs_by_model_name(
'testOpWithoutModelName')
assert self.ftd_plugin.get_operation_specs_by_model_name('nonExistingOperation') is None
@staticmethod
def _connection_response(response, status=200):
response_mock = mock.Mock()
response_mock.getcode.return_value = status
response_text = json.dumps(response) if type(response) is dict else response
response_data = BytesIO(response_text.encode() if response_text else ''.encode())
return response_mock, response_data
| gpl-3.0 | -8,765,652,224,979,166,000 | 45.984227 | 117 | 0.639721 | false |
mferenca/HMS-ecommerce | ecommerce/extensions/api/v2/tests/views/test_products.py | 1 | 7818 | from __future__ import unicode_literals
import datetime
import json
import pytz
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from oscar.core.loading import get_model
from ecommerce.coupons.tests.mixins import CouponMixin
from ecommerce.courses.models import Course
from ecommerce.extensions.api.serializers import ProductSerializer
from ecommerce.extensions.api.v2.tests.views import JSON_CONTENT_TYPE, ProductSerializerMixin
from ecommerce.extensions.catalogue.tests.mixins import CourseCatalogTestMixin
from ecommerce.tests.testcases import TestCase
Benefit = get_model('offer', 'Benefit')
Catalog = get_model('catalogue', 'Catalog')
Product = get_model('catalogue', 'Product')
ProductClass = get_model('catalogue', 'ProductClass')
Voucher = get_model('voucher', 'Voucher')
class ProductViewSetBase(ProductSerializerMixin, CourseCatalogTestMixin, TestCase):
def setUp(self):
super(ProductViewSetBase, self).setUp()
self.user = self.create_user(is_staff=True)
self.client.login(username=self.user.username, password=self.password)
self.course = Course.objects.create(id='edX/DemoX/Demo_Course', name='Test Course')
# TODO Update the expiration date by 2099-12-31
expires = datetime.datetime(2100, 1, 1, tzinfo=pytz.UTC)
self.seat = self.course.create_or_update_seat('honor', False, 0, self.partner, expires=expires)
class ProductViewSetTests(ProductViewSetBase):
def test_list(self):
""" Verify a list of products is returned. """
path = reverse('api:v2:product-list')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
results = [self.serialize_product(p) for p in self.course.products.all()]
expected = {'count': 2, 'next': None, 'previous': None, 'results': results}
self.assertDictEqual(json.loads(response.content), expected)
# If no products exist, the view should return an empty result set.
Product.objects.all().delete()
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
expected = {'count': 0, 'next': None, 'previous': None, 'results': []}
self.assertDictEqual(json.loads(response.content), expected)
def test_retrieve(self):
""" Verify a single product is returned. """
path = reverse('api:v2:product-detail', kwargs={'pk': 999})
response = self.client.get(path)
self.assertEqual(response.status_code, 404)
path = reverse('api:v2:product-detail', kwargs={'pk': self.seat.id})
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertDictEqual(json.loads(response.content), self.serialize_product(self.seat))
def test_destroy(self):
""" Verify the view does NOT allow products to be destroyed. """
product_id = self.seat.id
path = reverse('api:v2:product-detail', kwargs={'pk': product_id})
response = self.client.delete(path)
self.assertEqual(response.status_code, 405)
self.assertTrue(Product.objects.filter(id=product_id).exists())
def test_update(self):
""" Verify the view allows individual products to be updated. """
data = self.serialize_product(self.seat)
data['title'] = 'Fake Seat Title'
path = reverse('api:v2:product-detail', kwargs={'pk': self.seat.id})
response = self.client.put(path, json.dumps(data), JSON_CONTENT_TYPE)
self.assertEqual(response.status_code, 200, response.content)
product = Product.objects.get(id=self.seat.id)
self.assertEqual(product.title, data['title'])
self.assertDictEqual(json.loads(response.content), self.serialize_product(product))
def test_list_for_course(self):
""" Verify the view supports listing products for a single course. """
# Create another course and seat to confirm filtering.
other_course = Course.objects.create(id='edX/DemoX/XYZ', name='Test Course 2')
other_course.create_or_update_seat('honor', False, 0, self.partner)
path = reverse('api:v2:course-product-list', kwargs={'parent_lookup_course_id': self.course.id})
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
results = [self.serialize_product(p) for p in self.course.products.all()]
expected = {'count': 2, 'next': None, 'previous': None, 'results': results}
self.assertDictEqual(json.loads(response.content), expected)
def test_get_partner_products(self):
"""Verify the endpoint returns the list of products associated with a
partner.
"""
url = reverse(
'api:v2:partner-product-list',
kwargs={'parent_lookup_stockrecords__partner_id': self.partner.id}
)
response = self.client.get(url)
expected_data = self.serialize_product(self.seat)
self.assertEqual(response.status_code, 200)
self.assertListEqual(json.loads(response.content)['results'], [expected_data])
def test_no_partner_product(self):
"""Verify the endpoint returns an empty list if no products are
associated with a partner.
"""
Product.objects.all().delete()
url = reverse(
'api:v2:partner-product-list',
kwargs={'parent_lookup_stockrecords__partner_id': self.partner.id}
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected = {
'count': 0,
'next': None,
'previous': None,
'results': []
}
self.assertDictEqual(json.loads(response.content), expected)
class ProductViewSetCouponTests(CouponMixin, ProductViewSetBase):
def test_coupon_product_details(self):
"""Verify the endpoint returns all coupon information."""
coupon = self.create_coupon()
url = reverse('api:v2:product-detail', kwargs={'pk': coupon.id})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
request = RequestFactory(SERVER_NAME=self.site.domain).get('/')
request.user = self.user
request.site = self.site
expected = ProductSerializer(coupon, context={'request': request}).data
self.assertDictEqual(response.data, expected)
def test_coupon_voucher_serializer(self):
"""Verify that the vouchers of a coupon are properly serialized."""
coupon = self.create_coupon()
url = reverse('api:v2:product-detail', kwargs={'pk': coupon.id})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
voucher = response_data['attribute_values'][0]['value'][0]
self.assertEqual(voucher['name'], 'Test coupon')
self.assertEqual(voucher['usage'], Voucher.SINGLE_USE)
self.assertEqual(voucher['benefit']['type'], Benefit.PERCENTAGE)
self.assertEqual(voucher['benefit']['value'], 100.0)
def test_product_filtering(self):
"""Verify products are filtered."""
self.create_coupon()
url = reverse('api:v2:product-list')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data['count'], 3)
filtered_url = '{}?product_class=CoUpOn'.format(url)
response = self.client.get(filtered_url)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data['count'], 1)
self.assertEqual(response_data['results'][0]['product_class'], 'Coupon')
| agpl-3.0 | 1,074,553,738,677,681,000 | 43.931034 | 104 | 0.664876 | false |
wenottingham/ansible | lib/ansible/plugins/cache/memory.py | 275 | 1466 | # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.cache.base import BaseCacheModule
class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
self._cache = {}
def get(self, key):
return self._cache.get(key)
def set(self, key, value):
self._cache[key] = value
def keys(self):
return self._cache.keys()
def contains(self, key):
return key in self._cache
def delete(self, key):
del self._cache[key]
def flush(self):
self._cache = {}
def copy(self):
return self._cache.copy()
def __getstate__(self):
return self.copy()
def __setstate__(self, data):
self._cache = data
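# Quick illustrative check (added for clarity; not in the original plugin).
# In practice Ansible instantiates this class itself through the cache loader.
if __name__ == '__main__':
    cache = CacheModule()
    cache.set('host1', {'ansible_os_family': 'Debian'})
    assert cache.contains('host1')
    assert cache.get('host1')['ansible_os_family'] == 'Debian'
    cache.delete('host1')
    assert not cache.contains('host1')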
| gpl-3.0 | 8,537,509,876,561,711,000 | 27.192308 | 70 | 0.666439 | false |
Maximilian-Reuter/SickRage | lib/hachoir_core/i18n.py | 86 | 6241 | # -*- coding: UTF-8 -*-
"""
Functions to manage internationalisation (i18n):
- initLocale(): setup locales and install Unicode compatible stdout and
stderr ;
- getTerminalCharset(): guess terminal charset ;
- gettext(text): translate a string to the current language. The function
  always returns a Unicode string. You can also use the alias: _() ;
- ngettext(singular, plural, count): translate a sentence with singular and
  plural forms. The function always returns a Unicode string.
WARNING: Loading this module indirectly calls initLocale() which sets
locale LC_ALL to ''. This is needed to get user preferred locale
settings.
"""
import hachoir_core.config as config
import hachoir_core
import locale
from os import path
import sys
from codecs import BOM_UTF8, BOM_UTF16_LE, BOM_UTF16_BE
def _getTerminalCharset():
"""
Function used by getTerminalCharset() to get terminal charset.
@see getTerminalCharset()
"""
# (1) Try locale.getpreferredencoding()
try:
charset = locale.getpreferredencoding()
if charset:
return charset
except (locale.Error, AttributeError):
pass
# (2) Try locale.nl_langinfo(CODESET)
try:
charset = locale.nl_langinfo(locale.CODESET)
if charset:
return charset
except (locale.Error, AttributeError):
pass
# (3) Try sys.stdout.encoding
if hasattr(sys.stdout, "encoding") and sys.stdout.encoding:
return sys.stdout.encoding
# (4) Otherwise, returns "ASCII"
return "ASCII"
def getTerminalCharset():
"""
    Guess terminal charset using different tests:
1. Try locale.getpreferredencoding()
2. Try locale.nl_langinfo(CODESET)
3. Try sys.stdout.encoding
4. Otherwise, returns "ASCII"
WARNING: Call initLocale() before calling this function.
"""
try:
return getTerminalCharset.value
except AttributeError:
getTerminalCharset.value = _getTerminalCharset()
return getTerminalCharset.value
class UnicodeStdout(object):
def __init__(self, old_device, charset):
self.device = old_device
self.charset = charset
def flush(self):
self.device.flush()
def write(self, text):
if isinstance(text, unicode):
text = text.encode(self.charset, 'replace')
self.device.write(text)
def writelines(self, lines):
for text in lines:
self.write(text)
def initLocale():
# Only initialize locale once
if initLocale.is_done:
return getTerminalCharset()
initLocale.is_done = True
# Setup locales
try:
locale.setlocale(locale.LC_ALL, "")
except (locale.Error, IOError):
pass
# Get the terminal charset
charset = getTerminalCharset()
# UnicodeStdout conflicts with the readline module
if config.unicode_stdout and ('readline' not in sys.modules):
# Replace stdout and stderr by unicode objet supporting unicode string
sys.stdout = UnicodeStdout(sys.stdout, charset)
sys.stderr = UnicodeStdout(sys.stderr, charset)
return charset
initLocale.is_done = False
def _dummy_gettext(text):
return unicode(text)
def _dummy_ngettext(singular, plural, count):
if 1 < abs(count) or not count:
return unicode(plural)
else:
return unicode(singular)
def _initGettext():
charset = initLocale()
# Try to load gettext module
if config.use_i18n:
try:
import gettext
ok = True
except ImportError:
ok = False
else:
ok = False
# gettext is not available or not needed: use dummy gettext functions
if not ok:
return (_dummy_gettext, _dummy_ngettext)
# Gettext variables
package = hachoir_core.PACKAGE
locale_dir = path.join(path.dirname(__file__), "..", "locale")
# Initialize gettext module
gettext.bindtextdomain(package, locale_dir)
gettext.textdomain(package)
translate = gettext.gettext
ngettext = gettext.ngettext
# TODO: translate_unicode lambda function really sucks!
# => find native function to do that
unicode_gettext = lambda text: \
unicode(translate(text), charset)
unicode_ngettext = lambda singular, plural, count: \
unicode(ngettext(singular, plural, count), charset)
return (unicode_gettext, unicode_ngettext)
UTF_BOMS = (
(BOM_UTF8, "UTF-8"),
(BOM_UTF16_LE, "UTF-16-LE"),
(BOM_UTF16_BE, "UTF-16-BE"),
)
# Set of valid characters for specific charset
CHARSET_CHARACTERS = (
# U+00E0: LATIN SMALL LETTER A WITH GRAVE
(set(u"©®éêè\xE0ç".encode("ISO-8859-1")), "ISO-8859-1"),
(set(u"©®éêè\xE0ç€".encode("ISO-8859-15")), "ISO-8859-15"),
(set(u"©®".encode("MacRoman")), "MacRoman"),
(set(u"εδηιθκμοΡσςυΈί".encode("ISO-8859-7")), "ISO-8859-7"),
)
def guessBytesCharset(bytes, default=None):
r"""
>>> guessBytesCharset("abc")
'ASCII'
>>> guessBytesCharset("\xEF\xBB\xBFabc")
'UTF-8'
>>> guessBytesCharset("abc\xC3\xA9")
'UTF-8'
>>> guessBytesCharset("File written by Adobe Photoshop\xA8 4.0\0")
'MacRoman'
>>> guessBytesCharset("\xE9l\xE9phant")
'ISO-8859-1'
>>> guessBytesCharset("100 \xA4")
'ISO-8859-15'
>>> guessBytesCharset('Word \xb8\xea\xe4\xef\xf3\xe7 - Microsoft Outlook 97 - \xd1\xf5\xe8\xec\xdf\xf3\xe5\xe9\xf2 e-mail')
'ISO-8859-7'
"""
# Check for UTF BOM
for bom_bytes, charset in UTF_BOMS:
if bytes.startswith(bom_bytes):
return charset
# Pure ASCII?
try:
text = unicode(bytes, 'ASCII', 'strict')
return 'ASCII'
except UnicodeDecodeError:
pass
# Valid UTF-8?
try:
text = unicode(bytes, 'UTF-8', 'strict')
return 'UTF-8'
except UnicodeDecodeError:
pass
# Create a set of non-ASCII characters
non_ascii_set = set( byte for byte in bytes if ord(byte) >= 128 )
for characters, charset in CHARSET_CHARACTERS:
if characters.issuperset(non_ascii_set):
return charset
return default
# Initialize _(), gettext() and ngettext() functions
gettext, ngettext = _initGettext()
_ = gettext
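if __name__ == '__main__':
    # Tiny illustrative check (added for clarity; not in the original module).
    # Output depends on the user's locale and on available translations.
    print _("Hello")
    print ngettext("%s file", "%s files", 3) % 3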
| gpl-3.0 | 5,393,930,058,155,709,000 | 28.023364 | 127 | 0.647561 | false |
bestvibes/neo4j-social-network | env/lib/python2.7/encodings/cp875.py | 593 | 13110 | """ Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp875',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\u0391' # 0x41 -> GREEK CAPITAL LETTER ALPHA
u'\u0392' # 0x42 -> GREEK CAPITAL LETTER BETA
u'\u0393' # 0x43 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0x44 -> GREEK CAPITAL LETTER DELTA
u'\u0395' # 0x45 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0x46 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0x47 -> GREEK CAPITAL LETTER ETA
u'\u0398' # 0x48 -> GREEK CAPITAL LETTER THETA
u'\u0399' # 0x49 -> GREEK CAPITAL LETTER IOTA
u'[' # 0x4A -> LEFT SQUARE BRACKET
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'!' # 0x4F -> EXCLAMATION MARK
u'&' # 0x50 -> AMPERSAND
u'\u039a' # 0x51 -> GREEK CAPITAL LETTER KAPPA
u'\u039b' # 0x52 -> GREEK CAPITAL LETTER LAMDA
u'\u039c' # 0x53 -> GREEK CAPITAL LETTER MU
u'\u039d' # 0x54 -> GREEK CAPITAL LETTER NU
u'\u039e' # 0x55 -> GREEK CAPITAL LETTER XI
u'\u039f' # 0x56 -> GREEK CAPITAL LETTER OMICRON
u'\u03a0' # 0x57 -> GREEK CAPITAL LETTER PI
u'\u03a1' # 0x58 -> GREEK CAPITAL LETTER RHO
u'\u03a3' # 0x59 -> GREEK CAPITAL LETTER SIGMA
u']' # 0x5A -> RIGHT SQUARE BRACKET
u'$' # 0x5B -> DOLLAR SIGN
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'^' # 0x5F -> CIRCUMFLEX ACCENT
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\u03a4' # 0x62 -> GREEK CAPITAL LETTER TAU
u'\u03a5' # 0x63 -> GREEK CAPITAL LETTER UPSILON
u'\u03a6' # 0x64 -> GREEK CAPITAL LETTER PHI
u'\u03a7' # 0x65 -> GREEK CAPITAL LETTER CHI
u'\u03a8' # 0x66 -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0x67 -> GREEK CAPITAL LETTER OMEGA
u'\u03aa' # 0x68 -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\u03ab' # 0x69 -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'|' # 0x6A -> VERTICAL LINE
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xa8' # 0x70 -> DIAERESIS
u'\u0386' # 0x71 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\u0388' # 0x72 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0389' # 0x73 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\xa0' # 0x74 -> NO-BREAK SPACE
u'\u038a' # 0x75 -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\u038c' # 0x76 -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\u038e' # 0x77 -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u038f' # 0x78 -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'`' # 0x79 -> GRAVE ACCENT
u':' # 0x7A -> COLON
u'#' # 0x7B -> NUMBER SIGN
u'@' # 0x7C -> COMMERCIAL AT
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'"' # 0x7F -> QUOTATION MARK
u'\u0385' # 0x80 -> GREEK DIALYTIKA TONOS
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\u03b1' # 0x8A -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0x8B -> GREEK SMALL LETTER BETA
u'\u03b3' # 0x8C -> GREEK SMALL LETTER GAMMA
u'\u03b4' # 0x8D -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0x8E -> GREEK SMALL LETTER EPSILON
u'\u03b6' # 0x8F -> GREEK SMALL LETTER ZETA
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\u03b7' # 0x9A -> GREEK SMALL LETTER ETA
u'\u03b8' # 0x9B -> GREEK SMALL LETTER THETA
u'\u03b9' # 0x9C -> GREEK SMALL LETTER IOTA
u'\u03ba' # 0x9D -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0x9E -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0x9F -> GREEK SMALL LETTER MU
u'\xb4' # 0xA0 -> ACUTE ACCENT
u'~' # 0xA1 -> TILDE
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\u03bd' # 0xAA -> GREEK SMALL LETTER NU
u'\u03be' # 0xAB -> GREEK SMALL LETTER XI
u'\u03bf' # 0xAC -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xAD -> GREEK SMALL LETTER PI
u'\u03c1' # 0xAE -> GREEK SMALL LETTER RHO
u'\u03c3' # 0xAF -> GREEK SMALL LETTER SIGMA
u'\xa3' # 0xB0 -> POUND SIGN
u'\u03ac' # 0xB1 -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u03ad' # 0xB2 -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xB3 -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03ca' # 0xB4 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03af' # 0xB5 -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03cc' # 0xB6 -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u03cd' # 0xB7 -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03cb' # 0xB8 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u03ce' # 0xB9 -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\u03c2' # 0xBA -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c4' # 0xBB -> GREEK SMALL LETTER TAU
u'\u03c5' # 0xBC -> GREEK SMALL LETTER UPSILON
u'\u03c6' # 0xBD -> GREEK SMALL LETTER PHI
u'\u03c7' # 0xBE -> GREEK SMALL LETTER CHI
u'\u03c8' # 0xBF -> GREEK SMALL LETTER PSI
u'{' # 0xC0 -> LEFT CURLY BRACKET
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\u03c9' # 0xCB -> GREEK SMALL LETTER OMEGA
u'\u0390' # 0xCC -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u03b0' # 0xCD -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\u2018' # 0xCE -> LEFT SINGLE QUOTATION MARK
u'\u2015' # 0xCF -> HORIZONTAL BAR
u'}' # 0xD0 -> RIGHT CURLY BRACKET
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb1' # 0xDA -> PLUS-MINUS SIGN
u'\xbd' # 0xDB -> VULGAR FRACTION ONE HALF
u'\x1a' # 0xDC -> SUBSTITUTE
u'\u0387' # 0xDD -> GREEK ANO TELEIA
u'\u2019' # 0xDE -> RIGHT SINGLE QUOTATION MARK
u'\xa6' # 0xDF -> BROKEN BAR
u'\\' # 0xE0 -> REVERSE SOLIDUS
u'\x1a' # 0xE1 -> SUBSTITUTE
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xa7' # 0xEB -> SECTION SIGN
u'\x1a' # 0xEC -> SUBSTITUTE
u'\x1a' # 0xED -> SUBSTITUTE
u'\xab' # 0xEE -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xEF -> NOT SIGN
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xa9' # 0xFB -> COPYRIGHT SIGN
u'\x1a' # 0xFC -> SUBSTITUTE
u'\x1a' # 0xFD -> SUBSTITUTE
u'\xbb' # 0xFE -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
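# Illustrative usage sketch (not part of the original module): tables like
# these are normally wrapped in the stdlib encodings.* Codec pattern. The
# Codec class below is an assumption about how the tables get consumed;
# codecs.charmap_encode/charmap_decode are real stdlib functions.
#
#     class Codec(codecs.Codec):
#         def encode(self, input, errors='strict'):
#             return codecs.charmap_encode(input, errors, encoding_table)
#         def decode(self, input, errors='strict'):
#             return codecs.charmap_decode(input, errors, decoding_table)
#
# With that in place, u'\u0393' (GREEK CAPITAL LETTER GAMMA) round-trips
# through byte 0x43 per the decoding table above.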
| mit | -6,321,853,707,885,503,000 | 41.703583 | 116 | 0.535698 | false |
imsplitbit/nova | nova/exception.py | 1 | 42268 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
import sys
from oslo.config import cfg
import webob.exc
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova import safe_utils
LOG = logging.getLogger(__name__)
exc_log_opts = [
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help='make exception message format errors fatal'),
]
CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
def __init__(self, code=0, title="", explanation=""):
self.code = code
self.title = title
self.explanation = explanation
super(ConvertedException, self).__init__()
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
    return dict((k, v) for k, v in original.iteritems() if "_pass" not in k)
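# For example (values hypothetical):
#   _cleanse_dict({'admin_password': 's3cret', 'name': 'vm1'})
# returns {'name': 'vm1'}, since 'admin_password' contains "_pass".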
def wrap_exception(notifier=None, get_notifier=None):
"""This decorator wraps a method to catch any exceptions that may
get thrown. It logs the exception as well as optionally sending
it to the notification system.
"""
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if notifier or get_notifier:
payload = dict(exception=e)
call_dict = safe_utils.getcallargs(f, context,
*args, **kw)
cleansed = _cleanse_dict(call_dict)
payload.update({'args': cleansed})
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
event_type = f.__name__
(notifier or get_notifier()).error(context,
event_type,
payload)
return functools.wraps(f)(wrapped)
return inner
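# Usage sketch (hypothetical manager method; the get_notifier wiring is an
# assumption about typical call sites, not taken from this file):
#
#     @wrap_exception(get_notifier=lambda: my_notifier)
#     def resize_instance(self, context, instance, flavor):
#         ...
#
# Any exception raised inside resize_instance is re-raised after a
# 'resize_instance' error notification carrying the cleansed call args.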
class NovaException(Exception):
"""Base Nova Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
try:
message = self.msg_fmt % kwargs
except Exception:
exc_info = sys.exc_info()
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_('Exception in string format operation'))
for name, value in kwargs.iteritems():
LOG.error("%s: %s" % (name, value))
if CONF.fatal_exception_format_errors:
raise exc_info[0], exc_info[1], exc_info[2]
else:
# at least get the core message out if something happened
message = self.msg_fmt
super(NovaException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
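# Illustrative sketch (hypothetical subclass, not part of this module):
#
#     class DiskFull(NovaException):
#         msg_fmt = _("Disk %(path)s is full: %(free)d bytes free")
#
#     raise DiskFull(path='/var/lib/nova', free=0)
#     # message -> "Disk /var/lib/nova is full: 0 bytes free"
#
# The interpolation happens in NovaException.__init__ above; on a kwargs
# mismatch the error is logged and, when fatal_exception_format_errors is
# set, the original format exception is re-raised.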
class EncryptionFailure(NovaException):
msg_fmt = _("Failed to encrypt text: %(reason)s")
class DecryptionFailure(NovaException):
msg_fmt = _("Failed to decrypt text: %(reason)s")
class VirtualInterfaceCreateException(NovaException):
msg_fmt = _("Virtual Interface creation failed")
class VirtualInterfaceMacAddressException(NovaException):
msg_fmt = _("5 attempts to create virtual interface"
"with unique mac address failed")
class GlanceConnectionFailed(NovaException):
msg_fmt = _("Connection to glance host %(host)s:%(port)s failed: "
"%(reason)s")
class NotAuthorized(NovaException):
ec2_code = 'AuthFailure'
msg_fmt = _("Not authorized.")
code = 403
class AdminRequired(NotAuthorized):
msg_fmt = _("User does not have admin privileges")
class PolicyNotAuthorized(NotAuthorized):
msg_fmt = _("Policy doesn't allow %(action)s to be performed.")
class ImageNotActive(NovaException):
# NOTE(jruzicka): IncorrectState is used for volumes only in EC2,
# but it still seems like the most appropriate option.
ec2_code = 'IncorrectState'
msg_fmt = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
msg_fmt = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class InvalidBDMImage(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get image %(id)s.")
class InvalidBDMBootSequence(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"Boot sequence for the instance "
"and image/block device mapping "
"combination is not valid.")
class InvalidBDMLocalsLimit(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"You specified more local devices than the "
"limit allows")
class InvalidBDMEphemeralSize(InvalidBDM):
msg_fmt = _("Ephemeral disks requested are larger than "
"the instance type allows.")
class InvalidBDMSwapSize(InvalidBDM):
msg_fmt = _("Swap drive requested is larger than instance type allows.")
class InvalidBDMFormat(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(details)s")
class InvalidBDMForLegacy(InvalidBDM):
msg_fmt = _("Block Device Mapping cannot "
"be converted to legacy format. ")
class InvalidAttribute(Invalid):
msg_fmt = _("Attribute not supported: %(attr)s")
class VolumeUnattached(Invalid):
ec2_code = 'IncorrectState'
msg_fmt = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
msg_fmt = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts.")
class InvalidKeypair(Invalid):
ec2_code = 'InvalidKeyPair.Format'
msg_fmt = _("Keypair data is invalid") + ": %(reason)s"
class InvalidRequest(Invalid):
msg_fmt = _("The request is invalid.")
class InvalidInput(Invalid):
msg_fmt = _("Invalid input received") + ": %(reason)s"
class InvalidVolume(Invalid):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Invalid volume") + ": %(reason)s"
class InvalidVolumeAccessMode(Invalid):
msg_fmt = _("Invalid volume access mode") + ": %(access_mode)s"
class InvalidMetadata(Invalid):
msg_fmt = _("Invalid metadata") + ": %(reason)s"
class InvalidMetadataSize(Invalid):
msg_fmt = _("Invalid metadata size") + ": %(reason)s"
class InvalidPortRange(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
msg_fmt = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
msg_fmt = _("Invalid content type %(content_type)s.")
class InvalidCidr(Invalid):
msg_fmt = _("Invalid cidr %(cidr)s.")
class InvalidUnicodeParameter(Invalid):
msg_fmt = _("Invalid Parameter: "
"Unicode is not supported by the current database.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("%(err)s")
class InvalidAggregateAction(Invalid):
msg_fmt = _("Cannot perform action '%(action)s' on aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
msg_fmt = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
msg_fmt = _("Sort key supplied was not valid.")
class InstanceInvalidState(Invalid):
msg_fmt = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
msg_fmt = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
msg_fmt = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
msg_fmt = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
msg_fmt = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
msg_fmt = _("Failed to suspend instance") + ": %(reason)s"
class InstanceResumeFailure(Invalid):
msg_fmt = _("Failed to resume instance: %(reason)s.")
class InstancePowerOnFailure(Invalid):
msg_fmt = _("Failed to power on instance: %(reason)s.")
class InstancePowerOffFailure(Invalid):
msg_fmt = _("Failed to power off instance: %(reason)s.")
class InstanceRebootFailure(Invalid):
msg_fmt = _("Failed to reboot instance") + ": %(reason)s"
class InstanceTerminationFailure(Invalid):
msg_fmt = _("Failed to terminate instance") + ": %(reason)s"
class InstanceDeployFailure(Invalid):
msg_fmt = _("Failed to deploy instance") + ": %(reason)s"
class MultiplePortsNotApplicable(Invalid):
msg_fmt = _("Failed to launch instances") + ": %(reason)s"
class ServiceUnavailable(Invalid):
msg_fmt = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
msg_fmt = _("Insufficient compute resources.")
class HypervisorUnavailable(NovaException):
msg_fmt = _("Connection to the hypervisor is broken on host: %(host)s")
class ComputeServiceUnavailable(ServiceUnavailable):
msg_fmt = _("Compute service of %(host)s is unavailable at this time.")
class ComputeServiceInUse(NovaException):
msg_fmt = _("Compute service of %(host)s is still in use.")
class UnableToMigrateToSelf(Invalid):
msg_fmt = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
msg_fmt = _("The supplied hypervisor type of is invalid.")
class DestinationHypervisorTooOld(Invalid):
msg_fmt = _("The instance requires a newer hypervisor version than "
"has been provided.")
class DestinationDiskExists(Invalid):
msg_fmt = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
msg_fmt = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
msg_fmt = _("Unacceptable CPU info") + ": %(reason)s"
class InvalidIpAddressError(Invalid):
msg_fmt = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
msg_fmt = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
msg_fmt = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
msg_fmt = _("Disk format %(disk_format)s is not acceptable")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class InstanceUnacceptable(Invalid):
msg_fmt = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
msg_fmt = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
msg_fmt = _("Invalid ID received %(id)s.")
class ConstraintNotMet(NovaException):
msg_fmt = _("Constraint not met.")
code = 412
class NotFound(NovaException):
msg_fmt = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
msg_fmt = _("No agent-build associated with id %(id)s.")
class AgentBuildExists(NovaException):
msg_fmt = _("Agent-build with hypervisor %(hypervisor)s os %(os)s "
"architecture %(architecture)s exists.")
class VolumeNotFound(NotFound):
ec2_code = 'InvalidVolumeID.NotFound'
msg_fmt = _("Volume %(volume_id)s could not be found.")
class SnapshotNotFound(NotFound):
ec2_code = 'InvalidSnapshotID.NotFound'
msg_fmt = _("Snapshot %(snapshot_id)s could not be found.")
class DiskNotFound(NotFound):
msg_fmt = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
msg_fmt = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
msg_fmt = _("Invalid image href %(image_href)s.")
class AutoDiskConfigDisabledByImage(Invalid):
msg_fmt = _("Requested image %(image)s "
"has automatic disk resize disabled.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
# NOTE(jruzicka): ImageNotFound is not a valid EC2 error code.
class ImageNotFoundEC2(ImageNotFound):
msg_fmt = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
msg_fmt = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
msg_fmt = _("Cannot find SR to read/write VDI.")
class NetworkDuplicated(Invalid):
msg_fmt = _("Network %(network_id)s is duplicated.")
class NetworkInUse(NovaException):
msg_fmt = _("Network %(network_id)s is still in use.")
class NetworkNotCreated(NovaException):
msg_fmt = _("%(req)s is required to create a network.")
class NetworkNotFound(NotFound):
msg_fmt = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
msg_fmt = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
msg_fmt = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
msg_fmt = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
msg_fmt = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
msg_fmt = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
msg_fmt = _("No networks defined.")
class NoMoreNetworks(NovaException):
msg_fmt = _("No more available networks.")
class NetworkNotFoundForProject(NotFound):
msg_fmt = _("Either Network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class NetworkAmbiguous(Invalid):
msg_fmt = _("More than one possible network found. Specify "
"network ID(s) to select which one(s) to connect to,")
class DatastoreNotFound(NotFound):
msg_fmt = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(Invalid):
msg_fmt = _("Port %(port_id)s is still in use.")
class PortNotUsable(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotFree(Invalid):
msg_fmt = _("No free port available for instance %(instance)s.")
class FixedIpExists(NovaException):
msg_fmt = _("Fixed ip %(address)s already exists.")
class FixedIpNotFound(NotFound):
msg_fmt = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
msg_fmt = _("Fixed ip not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s has zero fixed ips.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
msg_fmt = _("Network host %(host)s has zero fixed ips "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s doesn't have fixed ip '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
msg_fmt = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAlreadyInUse(NovaException):
msg_fmt = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
msg_fmt = _("More than one instance is associated with fixed ip address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
msg_fmt = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Zero fixed ips available.")
class NoFixedIpsDefined(NotFound):
msg_fmt = _("Zero fixed ips could be found.")
class FloatingIpExists(NovaException):
msg_fmt = _("Floating ip %(address)s already exists.")
class FloatingIpNotFound(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip not found for id %(id)s.")
class FloatingIpDNSExists(Invalid):
msg_fmt = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
msg_fmt = _("Multiple floating ips are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
msg_fmt = _("Floating ip pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
msg_fmt = _("Zero floating ips available.")
safe = True
class FloatingIpAssociated(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
msg_fmt = _("Floating ip %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
msg_fmt = _("Zero floating ips exist.")
class NoFloatingIpInterface(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Interface %(interface)s not found.")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Cannot disassociate auto assigned floating ip")
class KeypairNotFound(NotFound):
ec2_code = 'InvalidKeyPair.NotFound'
msg_fmt = _("Keypair %(name)s not found for user %(user_id)s")
class ServiceNotFound(NotFound):
msg_fmt = _("Service %(service_id)s could not be found.")
class ServiceBinaryExists(NovaException):
msg_fmt = _("Service with host %(host)s binary %(binary)s exists.")
class ServiceTopicExists(NovaException):
msg_fmt = _("Service with host %(host)s topic %(topic)s exists.")
class HostNotFound(NotFound):
msg_fmt = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
msg_fmt = _("Compute host %(host)s could not be found.")
class HostBinaryNotFound(NotFound):
msg_fmt = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
msg_fmt = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
msg_fmt = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class QuotaNotFound(NotFound):
msg_fmt = _("Quota could not be found")
class QuotaExists(NovaException):
msg_fmt = _("Quota exists for project %(project_id)s, "
"resource %(resource)s")
class QuotaResourceUnknown(QuotaNotFound):
msg_fmt = _("Unknown quota resources %(unknown)s.")
class ProjectUserQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for user %(user_id)s in project %(project_id)s "
"could not be found.")
class ProjectQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
msg_fmt = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
msg_fmt = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
msg_fmt = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
msg_fmt = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
msg_fmt = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
msg_fmt = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExists(Invalid):
ec2_code = 'InvalidGroup.Duplicate'
msg_fmt = _("Security group %(security_group_name)s already exists "
"for project %(project_id)s.")
class SecurityGroupExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
msg_fmt = _("Security group default rule (%rule_id)s not found.")
class SecurityGroupCannotBeApplied(Invalid):
msg_fmt = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class SecurityGroupRuleExists(Invalid):
ec2_code = 'InvalidPermission.Duplicate'
msg_fmt = _("Rule already exists in group: %(rule)s")
class NoUniqueMatch(NovaException):
msg_fmt = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
msg_fmt = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
msg_fmt = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class ConsolePoolNotFound(NotFound):
msg_fmt = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolExists(NovaException):
msg_fmt = _("Console pool with host %(host)s, console_type "
"%(console_type)s and compute_host %(compute_host)s "
"already exists.")
class ConsolePoolNotFoundForHostType(NotFound):
msg_fmt = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
msg_fmt = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
msg_fmt = _("Invalid console type %(console_type)s")
class ConsoleTypeUnavailable(Invalid):
msg_fmt = _("Unavailable console type %(console_type)s.")
class InstanceTypeNotFound(NotFound):
msg_fmt = _("Instance type %(instance_type_id)s could not be found.")
class InstanceTypeNotFoundByName(InstanceTypeNotFound):
msg_fmt = _("Instance type with name %(instance_type_name)s "
"could not be found.")
class FlavorNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s could not be found.")
class FlavorAccessNotFound(NotFound):
msg_fmt = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class CellNotFound(NotFound):
msg_fmt = _("Cell %(cell_name)s doesn't exist.")
class CellExists(NovaException):
msg_fmt = _("Cell with name %(name)s already exists.")
class CellRoutingInconsistency(NovaException):
msg_fmt = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
msg_fmt = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
msg_fmt = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
msg_fmt = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
msg_fmt = _("No cells available matching scheduling criteria.")
class CellsUpdateUnsupported(NovaException):
msg_fmt = _("Cannot update cells configuration file.")
class InstanceUnknownCell(NotFound):
msg_fmt = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
msg_fmt = _("Scheduler Host Filter %(filter_name)s could not be found.")
class InstanceTypeExtraSpecsNotFound(NotFound):
msg_fmt = _("Instance Type %(instance_type_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class FileNotFound(NotFound):
msg_fmt = _("File %(file_path)s could not be found.")
class NoFilesFound(NotFound):
msg_fmt = _("Zero files could be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
msg_fmt = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
msg_fmt = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
msg_fmt = _("Class %(class_name)s could not be found: %(exception)s")
class NotAllowed(NovaException):
msg_fmt = _("Action not allowed.")
class ImageRotationNotAllowed(NovaException):
msg_fmt = _("Rotation is not allowed for snapshots")
class RotationRequiredForBackup(NovaException):
msg_fmt = _("Rotation param is required for backup image_type")
class KeyPairExists(NovaException):
ec2_code = 'InvalidKeyPair.Duplicate'
msg_fmt = _("Key pair '%(key_name)s' already exists.")
class InstanceExists(NovaException):
msg_fmt = _("Instance %(name)s already exists.")
class InstanceTypeExists(NovaException):
msg_fmt = _("Instance Type with name %(name)s already exists.")
class InstanceTypeIdExists(NovaException):
msg_fmt = _("Instance Type with ID %(flavor_id)s already exists.")
class FlavorAccessExists(NovaException):
msg_fmt = _("Flavor access already exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
msg_fmt = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
msg_fmt = _("%(path)s is not on local storage: %(reason)s")
class MigrationError(NovaException):
msg_fmt = _("Migration error") + ": %(reason)s"
class MigrationPreCheckError(MigrationError):
msg_fmt = _("Migration pre-check error") + ": %(reason)s"
class MalformedRequestBody(NovaException):
msg_fmt = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when a 404 error is
# appropriate to be returned
class ConfigNotFound(NovaException):
msg_fmt = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
msg_fmt = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
msg_fmt = _("Resize error: %(reason)s")
class CannotResizeDisk(NovaException):
msg_fmt = _("Server disk was unable to be resized because: %(reason)s")
class InstanceTypeMemoryTooSmall(NovaException):
msg_fmt = _("Instance type's memory is too small for requested image.")
class InstanceTypeDiskTooSmall(NovaException):
msg_fmt = _("Instance type's disk is too small for requested image.")
class InsufficientFreeMemory(NovaException):
msg_fmt = _("Insufficient free memory on compute node to start %(uuid)s.")
class NoValidHost(NovaException):
msg_fmt = _("No valid host was found. %(reason)s")
class QuotaError(NovaException):
ec2_code = 'ResourceLimitExceeded'
msg_fmt = _("Quota exceeded") + ": code=%(code)s"
code = 413
headers = {'Retry-After': 0}
safe = True
class TooManyInstances(QuotaError):
msg_fmt = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)d of %(allowed)d %(resource)s")
class FloatingIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of floating ips exceeded")
class FixedIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of fixed ips exceeded")
class MetadataLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
msg_fmt = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(QuotaError):
msg_fmt = _("Personality file path too long")
class OnsetFileContentLimitExceeded(QuotaError):
msg_fmt = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
ec2_code = 'SecurityGroupLimitExceeded'
msg_fmt = _("Maximum number of security groups or rules exceeded")
class PortLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of ports exceeded")
class AggregateError(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class InstanceTypeCreateFailed(NovaException):
msg_fmt = _("Unable to create instance type")
class InstancePasswordSetFailed(NovaException):
msg_fmt = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class DuplicateVlan(NovaException):
msg_fmt = _("Detected existing vlan with id %(vlan)d")
class CidrConflict(NovaException):
msg_fmt = _("There was a conflict when trying to complete your request.")
code = 409
class InstanceNotFound(NotFound):
ec2_code = 'InvalidInstanceID.NotFound'
msg_fmt = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
msg_fmt = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class NodeNotFound(NotFound):
msg_fmt = _("Node %(node_id)s could not be found.")
class NodeNotFoundByUUID(NotFound):
msg_fmt = _("Node with UUID %(node_uuid)s could not be found.")
class MarkerNotFound(NotFound):
msg_fmt = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
ec2_code = 'InvalidInstanceID.Malformed'
msg_fmt = _("Invalid id: %(val)s (expecting \"i-...\").")
class CouldNotFetchImage(NovaException):
msg_fmt = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
msg_fmt = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
msg_fmt = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
msg_fmt = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
msg_fmt = _("Instance %(instance_uuid)s is locked")
class ConfigDriveInvalidValue(Invalid):
msg_fmt = _("Invalid value for Config Drive option: %(option)s")
class ConfigDriveMountFailed(NovaException):
msg_fmt = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
msg_fmt = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class InterfaceAttachFailed(Invalid):
msg_fmt = _("Failed to attach network adapter device to %(instance)s")
class InterfaceDetachFailed(Invalid):
msg_fmt = _("Failed to detach network adapter device from %(instance)s")
class InstanceUserDataTooLarge(NovaException):
msg_fmt = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
msg_fmt = _("User data needs to be valid base 64.")
class UnexpectedTaskStateError(NovaException):
msg_fmt = _("unexpected task state: expecting %(expected)s but "
"the actual state is %(actual)s")
class InstanceActionNotFound(NovaException):
msg_fmt = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
msg_fmt = _("Event %(event)s not found for action id %(action_id)s")
class UnexpectedVMStateError(NovaException):
msg_fmt = _("unexpected VM state: expecting %(expected)s but "
"the actual state is %(actual)s")
class CryptoCAFileNotFound(FileNotFound):
msg_fmt = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
msg_fmt = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
msg_fmt = _('Instance recreate is not implemented by this virt driver.')
class ServiceGroupUnavailable(NovaException):
msg_fmt = _("The service from servicegroup driver %(driver)s is "
"temporarily unavailable.")
class DBNotAllowed(NovaException):
msg_fmt = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
msg_fmt = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
msg_fmt = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
msg_fmt = _("Invalid Base 64 data for file %(path)s")
class BuildAbortException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s aborted: %(reason)s")
class RescheduledException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s was re-scheduled: "
"%(reason)s")
class ShadowTableExists(NovaException):
msg_fmt = _("Shadow table with name %(name)s already exists.")
class InstanceFaultRollback(NovaException):
def __init__(self, inner_exception=None):
message = _("Instance rollback performed due to: %s")
self.inner_exception = inner_exception
super(InstanceFaultRollback, self).__init__(message % inner_exception)
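# Usage sketch (hypothetical): InstanceFaultRollback(RuntimeError('oops'))
# stores the RuntimeError as inner_exception and renders the message
# "Instance rollback performed due to: oops".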
class UnsupportedObjectError(NovaException):
msg_fmt = _('Unsupported object type %(objtype)s')
class OrphanedObjectError(NovaException):
msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')
class IncompatibleObjectVersion(NovaException):
msg_fmt = _('Version %(objver)s of %(objname)s is not supported')
class ObjectActionError(NovaException):
msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class CoreAPIMissing(NovaException):
msg_fmt = _("Core API extensions are missing: %(missing_apis)s")
class AgentError(NovaException):
msg_fmt = _('Error during following call to agent: %(method)s')
class AgentTimeout(AgentError):
msg_fmt = _('Unable to contact guest agent. '
'The following call timed out: %(method)s')
class AgentNotImplemented(AgentError):
msg_fmt = _('Agent does not support the call: %(method)s')
class InstanceGroupNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s could not be found.")
class InstanceGroupIdExists(NovaException):
msg_fmt = _("Instance group %(group_uuid)s already exists.")
class InstanceGroupMetadataNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no metadata with "
"key %(metadata_key)s.")
class InstanceGroupMemberNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no member with "
"id %(instance_id)s.")
class InstanceGroupPolicyNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no policy %(policy)s.")
class PluginRetriesExceeded(NovaException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class ImageDownloadModuleError(NovaException):
msg_fmt = _("There was an error with the download module %(module)s. "
"%(reason)s")
class ImageDownloadModuleMetaDataError(ImageDownloadModuleError):
msg_fmt = _("The metadata for this location will not work with this "
"module %(module)s. %(reason)s.")
class ImageDownloadModuleNotImplementedError(ImageDownloadModuleError):
msg_fmt = _("The method %(method_name)s is not implemented.")
class ImageDownloadModuleConfigurationError(ImageDownloadModuleError):
msg_fmt = _("The module %(module)s is misconfigured: %(reason)s.")
class PciDeviceWrongAddressFormat(NovaException):
msg_fmt = _("The PCI address %(address)s has an incorrect format.")
class PciDeviceNotFoundById(NotFound):
msg_fmt = _("PCI device %(id)s not found")
class PciDeviceNotFound(NovaException):
msg_fmt = _("PCI Device %(node_id)s:%(address)s not found.")
class PciDeviceInvalidStatus(NovaException):
msg_fmt = _(
"PCI Device %(compute_node_id)s:%(address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceInvalidOwner(NovaException):
msg_fmt = _(
"PCI Device %(compute_node_id)s:%(address)s is owned by %(owner)s "
"instead of %(hopeowner)s")
class PciDeviceRequestFailed(NovaException):
msg_fmt = _(
"PCI Device request (%requests)s failed")
class PciDevicePoolEmpty(NovaException):
msg_fmt = _(
"Attempt to consume PCI Device %(compute_node_id)s:%(address)s "
"from empty pool")
class PciInvalidAlias(NovaException):
msg_fmt = _("Invalid PCI alias definition: %(reason)s")
class PciRequestAliasNotDefined(NovaException):
msg_fmt = _("PCI alias %(alias)s is not defined")
class MissingParameter(NovaException):
ec2_code = 'MissingParameter'
msg_fmt = _("Not enough parameters: %(reason)s")
code = 400
class PciConfigInvalidWhitelist(Invalid):
msg_fmt = _("Invalid PCI devices Whitelist config %(reason)s")
class PciTrackerInvalidNodeId(NovaException):
msg_fmt = _("Cannot change %(node_id)s to %(new_node_id)s")
# Cannot be templated, msg needs to be constructed when raised.
class InternalError(NovaException):
ec2_code = 'InternalError'
msg_fmt = "%(err)s"
class PciDevicePrepareFailed(NovaException):
msg_fmt = _("Failed to prepare PCI device %(id)s for instance "
"%(instance_uuid)s: %(reason)s")
class PciDeviceDetachFailed(NovaException):
msg_fmt = _("Failed to detach PCI device %(dev)s: %(reason)s")
class PciDeviceUnsupportedHypervisor(NovaException):
msg_fmt = _("%(type)s hypervisor does not support PCI devices")
class KeyManagerError(NovaException):
msg_fmt = _("key manager error: %(reason)s")
| apache-2.0 | 1,885,499,902,190,757,600 | 27.559459 | 79 | 0.672045 | false |
qedi-r/home-assistant | homeassistant/components/knx/light.py | 3 | 12148 | """Support for KNX/IP lights."""
from enum import Enum
import voluptuous as vol
from xknx.devices import Light as XknxLight
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_WHITE_VALUE,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_WHITE_VALUE,
Light,
)
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
from . import ATTR_DISCOVER_DEVICES, DATA_KNX
CONF_STATE_ADDRESS = "state_address"
CONF_BRIGHTNESS_ADDRESS = "brightness_address"
CONF_BRIGHTNESS_STATE_ADDRESS = "brightness_state_address"
CONF_COLOR_ADDRESS = "color_address"
CONF_COLOR_STATE_ADDRESS = "color_state_address"
CONF_COLOR_TEMP_ADDRESS = "color_temperature_address"
CONF_COLOR_TEMP_STATE_ADDRESS = "color_temperature_state_address"
CONF_COLOR_TEMP_MODE = "color_temperature_mode"
CONF_RGBW_ADDRESS = "rgbw_address"
CONF_RGBW_STATE_ADDRESS = "rgbw_state_address"
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"
DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR = (0.0, 0.0)
DEFAULT_BRIGHTNESS = 255
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_WHITE_VALUE = 255
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
class ColorTempModes(Enum):
"""Color temperature modes for config validation."""
absolute = "DPT-7.600"
relative = "DPT-5.001"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_MODE, default=DEFAULT_COLOR_TEMP_MODE): cv.enum(
ColorTempModes
),
vol.Optional(CONF_RGBW_ADDRESS): cv.string,
vol.Optional(CONF_RGBW_STATE_ADDRESS): cv.string,
vol.Optional(CONF_MIN_KELVIN, default=DEFAULT_MIN_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
)
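# Example configuration.yaml entry (a sketch; the group addresses are
# hypothetical, the keys match the schema above):
#
#   light:
#     - platform: knx
#       name: Kitchen
#       address: '1/0/9'
#       state_address: '1/0/10'
#       brightness_address: '1/0/11'
#       brightness_state_address: '1/0/12'
#       color_temperature_mode: absolute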
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up lights for KNX platform."""
if discovery_info is not None:
async_add_entities_discovery(hass, discovery_info, async_add_entities)
else:
async_add_entities_config(hass, config, async_add_entities)
@callback
def async_add_entities_discovery(hass, discovery_info, async_add_entities):
"""Set up lights for KNX platform configured via xknx.yaml."""
entities = []
for device_name in discovery_info[ATTR_DISCOVER_DEVICES]:
device = hass.data[DATA_KNX].xknx.devices[device_name]
entities.append(KNXLight(device))
async_add_entities(entities)
@callback
def async_add_entities_config(hass, config, async_add_entities):
"""Set up light for KNX platform configured within platform."""
group_address_tunable_white = None
group_address_tunable_white_state = None
group_address_color_temp = None
group_address_color_temp_state = None
if config[CONF_COLOR_TEMP_MODE] == ColorTempModes.absolute:
group_address_color_temp = config.get(CONF_COLOR_TEMP_ADDRESS)
group_address_color_temp_state = config.get(CONF_COLOR_TEMP_STATE_ADDRESS)
elif config[CONF_COLOR_TEMP_MODE] == ColorTempModes.relative:
group_address_tunable_white = config.get(CONF_COLOR_TEMP_ADDRESS)
group_address_tunable_white_state = config.get(CONF_COLOR_TEMP_STATE_ADDRESS)
light = XknxLight(
hass.data[DATA_KNX].xknx,
name=config[CONF_NAME],
group_address_switch=config[CONF_ADDRESS],
group_address_switch_state=config.get(CONF_STATE_ADDRESS),
group_address_brightness=config.get(CONF_BRIGHTNESS_ADDRESS),
group_address_brightness_state=config.get(CONF_BRIGHTNESS_STATE_ADDRESS),
group_address_color=config.get(CONF_COLOR_ADDRESS),
group_address_color_state=config.get(CONF_COLOR_STATE_ADDRESS),
group_address_rgbw=config.get(CONF_RGBW_ADDRESS),
group_address_rgbw_state=config.get(CONF_RGBW_STATE_ADDRESS),
group_address_tunable_white=group_address_tunable_white,
group_address_tunable_white_state=group_address_tunable_white_state,
group_address_color_temperature=group_address_color_temp,
group_address_color_temperature_state=group_address_color_temp_state,
min_kelvin=config[CONF_MIN_KELVIN],
max_kelvin=config[CONF_MAX_KELVIN],
)
hass.data[DATA_KNX].xknx.devices.add(light)
async_add_entities([KNXLight(light)])
class KNXLight(Light):
"""Representation of a KNX light."""
def __init__(self, device):
"""Initialize of KNX light."""
self.device = device
self._min_kelvin = device.min_kelvin
self._max_kelvin = device.max_kelvin
self._min_mireds = color_util.color_temperature_kelvin_to_mired(
self._max_kelvin
)
self._max_mireds = color_util.color_temperature_kelvin_to_mired(
self._min_kelvin
)
@callback
def async_register_callbacks(self):
"""Register callbacks to update hass after device was changed."""
async def after_update_callback(device):
"""Call after device was updated."""
await self.async_update_ha_state()
self.device.register_device_updated_cb(after_update_callback)
async def async_added_to_hass(self):
"""Store register state change callback."""
self.async_register_callbacks()
@property
def name(self):
"""Return the name of the KNX device."""
return self.device.name
@property
def available(self):
"""Return True if entity is available."""
return self.hass.data[DATA_KNX].connected
@property
def should_poll(self):
"""No polling needed within KNX."""
return False
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if not self.device.supports_brightness:
return None
return self.device.current_brightness
@property
def hs_color(self):
"""Return the HS color value."""
rgb = None
if self.device.supports_rgbw or self.device.supports_color:
rgb, _ = self.device.current_color
return color_util.color_RGB_to_hs(*rgb) if rgb else None
@property
def white_value(self):
"""Return the white value."""
white = None
if self.device.supports_rgbw:
_, white = self.device.current_color
return white
@property
def color_temp(self):
"""Return the color temperature in mireds."""
if self.device.supports_color_temperature:
kelvin = self.device.current_color_temperature
if kelvin is not None:
return color_util.color_temperature_kelvin_to_mired(kelvin)
if self.device.supports_tunable_white:
relative_ct = self.device.current_tunable_white
if relative_ct is not None:
# as KNX devices typically use Kelvin we use it as base for
# calculating ct from percent
return color_util.color_temperature_kelvin_to_mired(
self._min_kelvin
+ ((relative_ct / 255) * (self._max_kelvin - self._min_kelvin))
)
return None
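    # Worked example for the relative branch above, using the defaults
    # (2700..6000 K): relative_ct=255 -> 6000 K -> ~166 mireds, and
    # relative_ct=0 -> 2700 K -> ~370 mireds, matching the mired values
    # noted beside DEFAULT_MIN_KELVIN/DEFAULT_MAX_KELVIN.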
@property
def min_mireds(self):
"""Return the coldest color temp this light supports in mireds."""
return self._min_mireds
@property
def max_mireds(self):
"""Return the warmest color temp this light supports in mireds."""
return self._max_mireds
@property
def effect_list(self):
"""Return the list of supported effects."""
return None
@property
def effect(self):
"""Return the current effect."""
return None
@property
def is_on(self):
"""Return true if light is on."""
return self.device.state
@property
def supported_features(self):
"""Flag supported features."""
flags = 0
if self.device.supports_brightness:
flags |= SUPPORT_BRIGHTNESS
if self.device.supports_color:
flags |= SUPPORT_COLOR | SUPPORT_BRIGHTNESS
if self.device.supports_rgbw:
flags |= SUPPORT_COLOR | SUPPORT_WHITE_VALUE
if self.device.supports_color_temperature or self.device.supports_tunable_white:
flags |= SUPPORT_COLOR_TEMP
return flags
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness)
hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color)
white_value = kwargs.get(ATTR_WHITE_VALUE, self.white_value)
mireds = kwargs.get(ATTR_COLOR_TEMP, self.color_temp)
update_brightness = ATTR_BRIGHTNESS in kwargs
update_color = ATTR_HS_COLOR in kwargs
update_white_value = ATTR_WHITE_VALUE in kwargs
update_color_temp = ATTR_COLOR_TEMP in kwargs
# always only go one path for turning on (avoid conflicting changes
# and weird effects)
if self.device.supports_brightness and (update_brightness and not update_color):
# if we don't need to update the color, try updating brightness
# directly if supported; don't do it if color also has to be
# changed, as RGB color implicitly sets the brightness as well
await self.device.set_brightness(brightness)
elif (self.device.supports_rgbw or self.device.supports_color) and (
update_brightness or update_color or update_white_value
):
            # change RGB color, white value (if supported), and brightness
# if brightness or hs_color was not yet set use the default value
# to calculate RGB from as a fallback
if brightness is None:
brightness = DEFAULT_BRIGHTNESS
if hs_color is None:
hs_color = DEFAULT_COLOR
if white_value is None and self.device.supports_rgbw:
white_value = DEFAULT_WHITE_VALUE
rgb = color_util.color_hsv_to_RGB(*hs_color, brightness * 100 / 255)
await self.device.set_color(rgb, white_value)
elif self.device.supports_color_temperature and update_color_temp:
# change color temperature without ON telegram
kelvin = int(color_util.color_temperature_mired_to_kelvin(mireds))
if kelvin > self._max_kelvin:
kelvin = self._max_kelvin
elif kelvin < self._min_kelvin:
kelvin = self._min_kelvin
await self.device.set_color_temperature(kelvin)
elif self.device.supports_tunable_white and update_color_temp:
# calculate relative_ct from Kelvin to fit typical KNX devices
kelvin = min(
self._max_kelvin,
int(color_util.color_temperature_mired_to_kelvin(mireds)),
)
relative_ct = int(
255
* (kelvin - self._min_kelvin)
/ (self._max_kelvin - self._min_kelvin)
)
await self.device.set_tunable_white(relative_ct)
else:
# no color/brightness change requested, so just turn it on
await self.device.set_on()
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
await self.device.set_off()
| apache-2.0 | 2,638,273,227,097,628,000 | 37.081505 | 88 | 0.645291 | false |
swgillespie/coreclr | src/scripts/genXplatEventing.py | 8 | 31274 | #
## Licensed to the .NET Foundation under one or more agreements.
## The .NET Foundation licenses this file to you under the MIT license.
## See the LICENSE file in the project root for more information.
#
#
#USAGE:
#Add Events: modify <root>src/vm/ClrEtwAll.man
#See the code in <root>/src/inc/genXplatLttng.py for examples of using the subroutines in this file
#
# Python 2 compatibility
from __future__ import print_function
import os
import xml.dom.minidom as DOM
stdprolog="""
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************
DO NOT MODIFY. AUTOGENERATED FILE.
This file is generated using the logic from <root>/src/scripts/genXplatEventing.py
******************************************************************/
"""
stdprolog_cmake="""
#
#
#******************************************************************
#DO NOT MODIFY. AUTOGENERATED FILE.
#This file is generated using the logic from <root>/src/scripts/genXplatEventing.py
#******************************************************************
"""
lindent = "    "
palDataTypeMapping ={
#constructed types
"win:null" :" ",
"win:Int64" :"const __int64",
"win:ULong" :"const ULONG",
"win:count" :"*",
"win:Struct" :"const void",
#actual spec
"win:GUID" :"const GUID",
"win:AnsiString" :"LPCSTR",
"win:UnicodeString" :"PCWSTR",
"win:Double" :"const double",
"win:Int32" :"const signed int",
"win:Boolean" :"const BOOL",
"win:UInt64" :"const unsigned __int64",
"win:UInt32" :"const unsigned int",
"win:UInt16" :"const unsigned short",
"win:UInt8" :"const unsigned char",
"win:Pointer" :"const void*",
"win:Binary" :"const BYTE"
}
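# Reading sketch (an assumption about how the generator consumes this map,
# not taken from this file): a parameter's PAL type is
# palDataTypeMapping[inType], suffixed with palDataTypeMapping["win:count"]
# when the parameter is counted, e.g. a counted win:UInt32 becomes
# "const unsigned int" + "*" == "const unsigned int*".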
# A Template represents an ETW template can contain 1 or more AbstractTemplates
# The AbstractTemplate contains FunctionSignature
# FunctionSignature consist of FunctionParameter representing each parameter in it's signature
class AbstractTemplate:
def __init__(self,abstractTemplateName,abstractFnFrame):
self.abstractTemplateName = abstractTemplateName
self.AbstractFnFrame = abstractFnFrame
class Template:
def __init__(self,templateName):
self.template = templateName
self.allAbstractTemplateTypes = [] # list of AbstractTemplateNames
self.allAbstractTemplateLUT = {} #dictionary of AbstractTemplate
def append(self,abstractTemplateName,abstractFnFrame):
self.allAbstractTemplateTypes.append(abstractTemplateName)
self.allAbstractTemplateLUT[abstractTemplateName] = AbstractTemplate(abstractTemplateName,abstractFnFrame)
def getFnFrame(self,abstractTemplateName):
return self.allAbstractTemplateLUT[abstractTemplateName].AbstractFnFrame
def getAbstractVarProps(self,abstractTemplateName):
return self.allAbstractTemplateLUT[abstractTemplateName].AbstractVarProps
def getFnParam(self,name):
for subtemplate in self.allAbstractTemplateTypes:
frame = self.getFnFrame(subtemplate)
if frame.getParam(name):
return frame.getParam(name)
return None
class FunctionSignature:
def __init__(self):
self.LUT = {} # dictionary of FunctionParameter
self.paramlist = [] # list of parameters to maintain their order in signature
def append(self,variable,fnparam):
self.LUT[variable] = fnparam
self.paramlist.append(variable)
def getParam(self,variable):
return self.LUT.get(variable)
def getLength(self):
return len(self.paramlist)
class FunctionParameter:
def __init__(self,winType,name,count,prop):
self.winType = winType #ETW type as given in the manifest
self.name = name #parameter name as given in the manifest
self.prop = prop #any special property as determined by the manifest and developer
        # self.count (set below) indicates if the parameter is a pointer
if count == "win:null":
self.count = "win:null"
elif count or winType == "win:GUID" or count == "win:count":
#special case for GUIDS, consider them as structs
self.count = "win:count"
else:
self.count = "win:null"
def getTopLevelElementsByTagName(Node,tag):
dataNodes = []
for element in Node.getElementsByTagName(tag):
if element.parentNode == Node:
dataNodes.append(element)
return dataNodes
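# Note: minidom's getElementsByTagName is recursive, so the parentNode check
# above restricts results to direct children; e.g. a <data> nested inside a
# child <struct> is not returned when asking the parent for 'data'.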
def bucketizeAbstractTemplates(template,fnPrototypes,var_Dependecies):
# At this point we have the complete argument list, now break them into chunks of 10
# As Abstract Template supports a maximum of 10 arguments
    abstractTemplateName = template
    subevent_cnt = 1
templateProp = Template(template)
abstractFnFrame = FunctionSignature()
for variable in fnPrototypes.paramlist:
for dependency in var_Dependecies[variable]:
if not abstractFnFrame.getParam(dependency):
abstractFnFrame.append(dependency,fnPrototypes.getParam(dependency))
frameCount = abstractFnFrame.getLength()
if frameCount == 10:
templateProp.append(abstractTemplateName,abstractFnFrame)
abstractTemplateName = template + "_" + str(subevent_cnt)
subevent_cnt += 1
if len(var_Dependecies[variable]) > 1:
#check if the frame's dependencies are all present
depExists = True
for depends in var_Dependecies[variable]:
if not abstractFnFrame.getParam(depends):
depExists = False
break
if not depExists:
                            raise ValueError('Abstract Template: ' + abstractTemplateName + ' does not have all its dependencies in the frame; write the required logic here and test it. The parameter whose dependency is missing is: ' + variable)
                            #pseudo code:
                            # 1. add the missing dependencies to the frame of the current parameter
                            # 2. check if the frame has enough space; if so, continue adding missing dependencies
                            # 3. else save the current frame, start a new frame, and repeat steps 1 and 2
# 4. Add the current parameter and proceed
#create a new fn frame
abstractFnFrame = FunctionSignature()
#subevent_cnt == 1 represents argumentless templates
if abstractFnFrame.getLength() > 0 or subevent_cnt == 1:
templateProp.append(abstractTemplateName,abstractFnFrame)
return templateProp
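# Example (hypothetical template): 12 scalar parameters p0..p11 are split into
# two frames, "MyTemplate" holding p0..p9 (10 arguments, the abstract-template
# maximum) and "MyTemplate_1" holding p10..p11.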
ignoredXmlTemplateAttributes = frozenset(["map","outType"])
usedXmlTemplateAttributes = frozenset(["name","inType","count", "length"])
def parseTemplateNodes(templateNodes):
#return values
allTemplates = {}
for templateNode in templateNodes:
template = templateNode.getAttribute('tid')
var_Dependecies = {}
fnPrototypes = FunctionSignature()
dataNodes = getTopLevelElementsByTagName(templateNode,'data')
        # Validate that no new attributes have been added to the manifest
for dataNode in dataNodes:
nodeMap = dataNode.attributes
for attrib in nodeMap.values():
attrib_name = attrib.name
                if attrib_name not in ignoredXmlTemplateAttributes and attrib_name not in usedXmlTemplateAttributes:
raise ValueError('unknown attribute: '+ attrib_name + ' in template:'+ template)
for dataNode in dataNodes:
variable = dataNode.getAttribute('name')
wintype = dataNode.getAttribute('inType')
#count and length are the same
wincount = dataNode.getAttribute('count')
            winlength = dataNode.getAttribute('length')
var_Props = None
var_dependency = [variable]
if winlength:
if wincount:
raise Exception("both count and length property found on: " + variable + "in template: " + template)
wincount = winlength
            if wincount.isdigit() and int(wincount) == 1:
wincount = ''
if wincount:
if (wincount.isdigit()):
var_Props = wincount
elif fnPrototypes.getParam(wincount):
var_Props = wincount
var_dependency.insert(0,wincount)
#construct the function signature
if wintype == "win:GUID":
var_Props = "sizeof(GUID)/sizeof(int)"
var_Dependecies[variable] = var_dependency
fnparam = FunctionParameter(wintype,variable,wincount,var_Props)
fnPrototypes.append(variable,fnparam)
structNodes = getTopLevelElementsByTagName(templateNode,'struct')
        count = 0
for structToBeMarshalled in structNodes:
struct_len = "Arg"+ str(count) + "_Struct_Len_"
struct_pointer = "Arg"+ str(count) + "_Struct_Pointer_"
count += 1
#populate the Property- used in codegen
structname = structToBeMarshalled.getAttribute('name')
countVarName = structToBeMarshalled.getAttribute('count')
if not countVarName:
                raise ValueError('Struct ' + structname + ' in template: ' + template + ' does not have a count attribute')
var_Props = countVarName + "*" + struct_len + "/sizeof(int)"
var_Dependecies[struct_len] = [struct_len]
var_Dependecies[struct_pointer] = [countVarName,struct_len,struct_pointer]
fnparam_len = FunctionParameter("win:ULong",struct_len,"win:null",None)
fnparam_pointer = FunctionParameter("win:Struct",struct_pointer,"win:count",var_Props)
fnPrototypes.append(struct_len,fnparam_len)
fnPrototypes.append(struct_pointer,fnparam_pointer)
allTemplates[template] = bucketizeAbstractTemplates(template,fnPrototypes,var_Dependecies)
return allTemplates
def generateClrallEvents(eventNodes,allTemplates):
clrallEvents = []
for eventNode in eventNodes:
eventName = eventNode.getAttribute('symbol')
templateName = eventNode.getAttribute('template')
#generate EventEnabled
clrallEvents.append("inline BOOL EventEnabled")
clrallEvents.append(eventName)
clrallEvents.append("() {return XplatEventLogger::IsEventLoggingEnabled() && EventXplatEnabled")
clrallEvents.append(eventName+"();}\n\n")
#generate FireEtw functions
fnptype = []
fnbody = []
fnptype.append("inline ULONG FireEtw")
fnptype.append(eventName)
fnptype.append("(\n")
fnbody.append(lindent)
fnbody.append("if (!EventEnabled")
fnbody.append(eventName)
fnbody.append("()) {return ERROR_SUCCESS;}\n")
line = []
fnptypeline = []
if templateName:
for subTemplate in allTemplates[templateName].allAbstractTemplateTypes:
fnSig = allTemplates[templateName].getFnFrame(subTemplate)
for params in fnSig.paramlist:
fnparam = fnSig.getParam(params)
wintypeName = fnparam.winType
typewName = palDataTypeMapping[wintypeName]
winCount = fnparam.count
countw = palDataTypeMapping[winCount]
fnptypeline.append(lindent)
fnptypeline.append(typewName)
fnptypeline.append(countw)
fnptypeline.append(" ")
fnptypeline.append(fnparam.name)
fnptypeline.append(",\n")
#fnsignature
for params in fnSig.paramlist:
fnparam = fnSig.getParam(params)
line.append(fnparam.name)
line.append(",")
#remove trailing commas
if len(line) > 0:
del line[-1]
if len(fnptypeline) > 0:
del fnptypeline[-1]
fnptype.extend(fnptypeline)
fnptype.append("\n)\n{\n")
fnbody.append(lindent)
fnbody.append("return FireEtXplat")
fnbody.append(eventName)
fnbody.append("(")
fnbody.extend(line)
fnbody.append(");\n")
fnbody.append("}\n\n")
clrallEvents.extend(fnptype)
clrallEvents.extend(fnbody)
return ''.join(clrallEvents)
def generateClrXplatEvents(eventNodes, allTemplates):
clrallEvents = []
for eventNode in eventNodes:
eventName = eventNode.getAttribute('symbol')
templateName = eventNode.getAttribute('template')
#generate EventEnabled
clrallEvents.append("extern \"C\" BOOL EventXplatEnabled")
clrallEvents.append(eventName)
clrallEvents.append("();\n")
#generate FireEtw functions
fnptype = []
fnptypeline = []
fnptype.append("extern \"C\" ULONG FireEtXplat")
fnptype.append(eventName)
fnptype.append("(\n")
if templateName:
for subTemplate in allTemplates[templateName].allAbstractTemplateTypes:
fnSig = allTemplates[templateName].getFnFrame(subTemplate)
for params in fnSig.paramlist:
fnparam = fnSig.getParam(params)
wintypeName = fnparam.winType
typewName = palDataTypeMapping[wintypeName]
winCount = fnparam.count
countw = palDataTypeMapping[winCount]
fnptypeline.append(lindent)
fnptypeline.append(typewName)
fnptypeline.append(countw)
fnptypeline.append(" ")
fnptypeline.append(fnparam.name)
fnptypeline.append(",\n")
#remove trailing commas
if len(fnptypeline) > 0:
del fnptypeline[-1]
fnptype.extend(fnptypeline)
fnptype.append("\n);\n")
clrallEvents.extend(fnptype)
return ''.join(clrallEvents)
#generates the dummy header file which is used by the VM as the entry point to the logging functions
def generateclrEtwDummy(eventNodes,allTemplates):
clretmEvents = []
for eventNode in eventNodes:
eventName = eventNode.getAttribute('symbol')
templateName = eventNode.getAttribute('template')
fnptype = []
#generate FireEtw functions
fnptype.append("#define FireEtw")
fnptype.append(eventName)
fnptype.append("(");
line = []
if templateName:
for subTemplate in allTemplates[templateName].allAbstractTemplateTypes:
fnSig = allTemplates[templateName].getFnFrame(subTemplate)
for params in fnSig.paramlist:
fnparam = fnSig.getParam(params)
line.append(fnparam.name)
line.append(", ")
#remove trailing commas
if len(line) > 0:
del line[-1]
fnptype.extend(line)
fnptype.append(") 0\n")
clretmEvents.extend(fnptype)
return ''.join(clretmEvents)
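# Each generated stub expands the event to a no-op, roughly of the form
# (event name and parameters below are hypothetical):
#
#     #define FireEtwSampleEvent(Param1, Param2) 0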
def generateClralltestEvents(sClrEtwAllMan):
tree = DOM.parse(sClrEtwAllMan)
clrtestEvents = []
for providerNode in tree.getElementsByTagName('provider'):
templateNodes = providerNode.getElementsByTagName('template')
allTemplates = parseTemplateNodes(templateNodes)
eventNodes = providerNode.getElementsByTagName('event')
for eventNode in eventNodes:
eventName = eventNode.getAttribute('symbol')
templateName = eventNode.getAttribute('template')
clrtestEvents.append(" EventXplatEnabled" + eventName + "();\n")
clrtestEvents.append("Error |= FireEtXplat" + eventName + "(\n")
            line = []
            if templateName:
for subTemplate in allTemplates[templateName].allAbstractTemplateTypes:
fnSig = allTemplates[templateName].getFnFrame(subTemplate)
for params in fnSig.paramlist:
argline =''
fnparam = fnSig.getParam(params)
if fnparam.name.lower() == 'count':
argline = '2'
else:
if fnparam.winType == "win:Binary":
argline = 'win_Binary'
elif fnparam.winType == "win:Pointer" and fnparam.count == "win:count":
argline = "(const void**)&var11"
elif fnparam.winType == "win:Pointer" :
argline = "(const void*)var11"
elif fnparam.winType =="win:AnsiString":
argline = '" Testing AniString "'
elif fnparam.winType =="win:UnicodeString":
argline = 'W(" Testing UnicodeString ")'
else:
if fnparam.count == "win:count":
line.append("&")
argline = fnparam.winType.replace(":","_")
line.append(argline)
line.append(",\n")
#remove trailing commas
if len(line) > 0:
del line[-1]
line.append("\n")
line.append(");\n")
clrtestEvents.extend(line)
return ''.join(clrtestEvents)
def generateSanityTest(sClrEtwAllMan,testDir):
if not testDir:
return
print('Generating Event Logging Tests')
if not os.path.exists(testDir):
os.makedirs(testDir)
cmake_file = testDir + "/CMakeLists.txt"
test_cpp = "clralltestevents.cpp"
testinfo = testDir + "/testinfo.dat"
Cmake_file = open(cmake_file,'w')
Test_cpp = open(testDir + "/" + test_cpp,'w')
Testinfo = open(testinfo,'w')
#CMake File:
Cmake_file.write(stdprolog_cmake)
Cmake_file.write("""
cmake_minimum_required(VERSION 2.8.12.2)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(SOURCES
""")
Cmake_file.write(test_cpp)
Cmake_file.write("""
)
include_directories(${GENERATED_INCLUDE_DIR})
include_directories(${COREPAL_SOURCE_DIR}/inc/rt)
add_executable(eventprovidertest
${SOURCES}
)
set(EVENT_PROVIDER_DEPENDENCIES "")
set(EVENT_PROVIDER_LINKER_OPTIONS "")
if(FEATURE_EVENT_TRACE)
add_definitions(-DFEATURE_EVENT_TRACE=1)
list(APPEND EVENT_PROVIDER_DEPENDENCIES
coreclrtraceptprovider
eventprovider
)
list(APPEND EVENT_PROVIDER_LINKER_OPTIONS
${EVENT_PROVIDER_DEPENDENCIES}
)
endif(FEATURE_EVENT_TRACE)
add_dependencies(eventprovidertest ${EVENT_PROVIDER_DEPENDENCIES} coreclrpal)
target_link_libraries(eventprovidertest
coreclrpal
${EVENT_PROVIDER_LINKER_OPTIONS}
)
""")
Testinfo.write("""
Copyright (c) Microsoft Corporation. All rights reserved.
#
Version = 1.0
Section = EventProvider
Function = EventProvider
Name = PAL test for FireEtW* and EventEnabled* functions
TYPE = DEFAULT
EXE1 = eventprovidertest
Description
=This is a sanity test to check that there are no crashes in Xplat eventing
""")
#Test.cpp
Test_cpp.write(stdprolog)
Test_cpp.write("""
/*=====================================================================
**
** Source: clralltestevents.cpp
**
** Purpose: Ensure Correctness of Eventing code
**
**
**===================================================================*/
#include <palsuite.h>
#include <clrxplatevents.h>
typedef struct _Struct1 {
ULONG Data1;
unsigned short Data2;
unsigned short Data3;
unsigned char Data4[8];
} Struct1;
Struct1 var21[2] = { { 245, 13, 14, "deadbea" }, { 542, 0, 14, "deadflu" } };
Struct1* var11 = var21;
Struct1* win_Struct = var21;
GUID win_GUID ={ 245, 13, 14, "deadbea" };
double win_Double =34.04;
ULONG win_ULong = 34;
BOOL win_Boolean = FALSE;
unsigned __int64 win_UInt64 = 114;
unsigned int win_UInt32 = 4;
unsigned short win_UInt16 = 12;
unsigned char win_UInt8 = 9;
int win_Int32 = 12;
BYTE* win_Binary =(BYTE*)var21 ;
int __cdecl main(int argc, char **argv)
{
/* Initialize the PAL.
*/
if(0 != PAL_Initialize(argc, argv))
{
return FAIL;
}
ULONG Error = ERROR_SUCCESS;
#if defined(FEATURE_EVENT_TRACE)
Trace("\\n Starting functional eventing APIs tests \\n");
""")
Test_cpp.write(generateClralltestEvents(sClrEtwAllMan))
Test_cpp.write("""
/* Shutdown the PAL.
*/
if (Error != ERROR_SUCCESS)
{
Fail("One or more eventing Apis failed\\n ");
return FAIL;
}
Trace("\\n All eventing APIs were fired succesfully \\n");
#endif //defined(FEATURE_EVENT_TRACE)
PAL_Terminate();
return PASS;
}
""")
Cmake_file.close()
Test_cpp.close()
Testinfo.close()
def generateEtmDummyHeader(sClrEtwAllMan,clretwdummy):
if not clretwdummy:
return
print(' Generating Dummy Event Headers')
tree = DOM.parse(sClrEtwAllMan)
incDir = os.path.dirname(os.path.realpath(clretwdummy))
if not os.path.exists(incDir):
os.makedirs(incDir)
Clretwdummy = open(clretwdummy,'w')
Clretwdummy.write(stdprolog + "\n")
for providerNode in tree.getElementsByTagName('provider'):
templateNodes = providerNode.getElementsByTagName('template')
allTemplates = parseTemplateNodes(templateNodes)
eventNodes = providerNode.getElementsByTagName('event')
#pal: create etmdummy.h
Clretwdummy.write(generateclrEtwDummy(eventNodes, allTemplates) + "\n")
Clretwdummy.close()
def generatePlatformIndependentFiles(sClrEtwAllMan,incDir,etmDummyFile):
generateEtmDummyHeader(sClrEtwAllMan,etmDummyFile)
tree = DOM.parse(sClrEtwAllMan)
if not incDir:
return
print(' Generating Event Headers')
if not os.path.exists(incDir):
os.makedirs(incDir)
clrallevents = incDir + "/clretwallmain.h"
clrxplatevents = incDir + "/clrxplatevents.h"
Clrallevents = open(clrallevents,'w')
Clrxplatevents = open(clrxplatevents,'w')
Clrallevents.write(stdprolog + "\n")
Clrxplatevents.write(stdprolog + "\n")
Clrallevents.write("\n#include \"clrxplatevents.h\"\n\n")
for providerNode in tree.getElementsByTagName('provider'):
templateNodes = providerNode.getElementsByTagName('template')
allTemplates = parseTemplateNodes(templateNodes)
eventNodes = providerNode.getElementsByTagName('event')
#vm header:
Clrallevents.write(generateClrallEvents(eventNodes, allTemplates) + "\n")
        #pal: create clrxplatevents.h
Clrxplatevents.write(generateClrXplatEvents(eventNodes, allTemplates) + "\n")
Clrxplatevents.close()
Clrallevents.close()
class EventExclusions:
def __init__(self):
self.nostack = set()
self.explicitstack = set()
self.noclrinstance = set()
def parseExclusionList(exclusionListFile):
ExclusionFile = open(exclusionListFile,'r')
exclusionInfo = EventExclusions()
for line in ExclusionFile:
line = line.strip()
#remove comments
if not line or line.startswith('#'):
continue
tokens = line.split(':')
#entries starting with nomac are ignored
if "nomac" in tokens:
continue
if len(tokens) > 5:
raise Exception("Invalid Entry " + line + "in "+ exclusionListFile)
eventProvider = tokens[2]
eventTask = tokens[1]
eventSymbol = tokens[4]
if eventProvider == '':
eventProvider = "*"
if eventTask == '':
eventTask = "*"
if eventSymbol == '':
eventSymbol = "*"
entry = eventProvider + ":" + eventTask + ":" + eventSymbol
if tokens[0].lower() == "nostack":
exclusionInfo.nostack.add(entry)
if tokens[0].lower() == "stack":
exclusionInfo.explicitstack.add(entry)
if tokens[0].lower() == "noclrinstanceid":
exclusionInfo.noclrinstance.add(entry)
ExclusionFile.close()
return exclusionInfo
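# Sketch of the exclusion-list line format parsed above, as reconstructed from
# the token indices used by the parser (tokens[3] is unused here; event and
# provider names are hypothetical). Empty fields default to the wildcard '*':
#
#     nostack:GarbageCollection:Microsoft-Windows-DotNETRuntime::GCSample
#     noclrinstanceid::::SampleEvent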
def getStackWalkBit(eventProvider, taskName, eventSymbol, stackSet):
for entry in stackSet:
tokens = entry.split(':')
if len(tokens) != 3:
raise Exception("Error, possible error in the script which introduced the enrty "+ entry)
eventCond = tokens[0] == eventProvider or tokens[0] == "*"
taskCond = tokens[1] == taskName or tokens[1] == "*"
symbolCond = tokens[2] == eventSymbol or tokens[2] == "*"
if eventCond and taskCond and symbolCond:
return False
return True
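# Note the inverted return value: True means no matching exclusion entry was
# found (keep the stack-walk bit); False means an entry such as
# "*:*:GCSample" matched (entry hypothetical) and the bit is cleared.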
#Add the miscellaneous checks here
def checkConsistency(sClrEtwAllMan,exclusionListFile):
tree = DOM.parse(sClrEtwAllMan)
exclusionInfo = parseExclusionList(exclusionListFile)
for providerNode in tree.getElementsByTagName('provider'):
stackSupportSpecified = {}
eventNodes = providerNode.getElementsByTagName('event')
templateNodes = providerNode.getElementsByTagName('template')
eventProvider = providerNode.getAttribute('name')
allTemplates = parseTemplateNodes(templateNodes)
for eventNode in eventNodes:
taskName = eventNode.getAttribute('task')
eventSymbol = eventNode.getAttribute('symbol')
eventTemplate = eventNode.getAttribute('template')
eventValue = int(eventNode.getAttribute('value'))
clrInstanceBit = getStackWalkBit(eventProvider, taskName, eventSymbol, exclusionInfo.noclrinstance)
sLookupFieldName = "ClrInstanceID"
sLookupFieldType = "win:UInt16"
if clrInstanceBit and allTemplates.get(eventTemplate):
                # check for the event template and look for a field named ClrInstanceID of type win:UInt16
fnParam = allTemplates[eventTemplate].getFnParam(sLookupFieldName)
if not(fnParam and fnParam.winType == sLookupFieldType):
raise Exception(exclusionListFile + ":No " + sLookupFieldName + " field of type " + sLookupFieldType + " for event symbol " + eventSymbol)
# If some versions of an event are on the nostack/stack lists,
# and some versions are not on either the nostack or stack list,
            # then the developer likely forgot to specify one of the versions
eventStackBitFromNoStackList = getStackWalkBit(eventProvider, taskName, eventSymbol, exclusionInfo.nostack)
eventStackBitFromExplicitStackList = getStackWalkBit(eventProvider, taskName, eventSymbol, exclusionInfo.explicitstack)
sStackSpecificityError = exclusionListFile + ": Error processing event :" + eventSymbol + "(ID" + str(eventValue) + "): This file must contain either ALL versions of this event or NO versions of this event. Currently some, but not all, versions of this event are present\n"
if not stackSupportSpecified.get(eventValue):
# Haven't checked this event before. Remember whether a preference is stated
if ( not eventStackBitFromNoStackList) or ( not eventStackBitFromExplicitStackList):
stackSupportSpecified[eventValue] = True
else:
stackSupportSpecified[eventValue] = False
else:
# We've checked this event before.
if stackSupportSpecified[eventValue]:
# When we last checked, a preference was previously specified, so it better be specified here
if eventStackBitFromNoStackList and eventStackBitFromExplicitStackList:
raise Exception(sStackSpecificityError)
else:
# When we last checked, a preference was not previously specified, so it better not be specified here
if ( not eventStackBitFromNoStackList) or ( not eventStackBitFromExplicitStackList):
raise Exception(sStackSpecificityError)
import argparse
import sys
def main(argv):
#parse the command line
    parser = argparse.ArgumentParser(description="Generates the code required to instrument the LTTng logging mechanism")
required = parser.add_argument_group('required arguments')
required.add_argument('--man', type=str, required=True,
                          help='full path to manifest containing the description of events')
required.add_argument('--exc', type=str, required=True,
help='full path to exclusion list')
required.add_argument('--inc', type=str, default=None,
help='full path to directory where the header files will be generated')
required.add_argument('--dummy', type=str,default=None,
help='full path to file that will have dummy definitions of FireEtw functions')
required.add_argument('--testdir', type=str, default=None,
help='full path to directory where the test assets will be deployed' )
args, unknown = parser.parse_known_args(argv)
if unknown:
print('Unknown argument(s): ', ', '.join(unknown))
return const.UnknownArguments
sClrEtwAllMan = args.man
exclusionListFile = args.exc
incdir = args.inc
etmDummyFile = args.dummy
testDir = args.testdir
checkConsistency(sClrEtwAllMan, exclusionListFile)
    generatePlatformIndependentFiles(sClrEtwAllMan,incdir,etmDummyFile)
generateSanityTest(sClrEtwAllMan,testDir)
if __name__ == '__main__':
return_code = main(sys.argv[1:])
sys.exit(return_code)
| mit | -3,069,274,019,837,486,000 | 37.514778 | 285 | 0.59615 | false |
firebitsbr/pwn_plug_sources | src/metagoofil/pdfminer/pdffont.py | 32 | 26471 | #!/usr/bin/env python2
import sys
import struct
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from cmapdb import CMapDB, CMapParser, FileUnicodeMap, CMap
from encodingdb import EncodingDB, name2unicode
from psparser import PSStackParser
from psparser import PSSyntaxError, PSEOF
from psparser import LIT, KWD, STRICT
from psparser import PSLiteral, literal_name
from pdftypes import PDFException, resolve1
from pdftypes import int_value, float_value, num_value
from pdftypes import str_value, list_value, dict_value, stream_value
from fontmetrics import FONT_METRICS
from utils import apply_matrix_norm, nunpack, choplist
def get_widths(seq):
widths = {}
r = []
for v in seq:
if isinstance(v, list):
if r:
char1 = r[-1]
for (i,w) in enumerate(v):
widths[char1+i] = w
r = []
elif isinstance(v, int):
r.append(v)
if len(r) == 3:
(char1,char2,w) = r
for i in xrange(char1, char2+1):
widths[i] = w
r = []
return widths
#assert get_widths([1]) == {}
#assert get_widths([1,2,3]) == {1:3, 2:3}
#assert get_widths([1,[2,3],6,[7,8]]) == {1:2,2:3, 6:7,7:8}
def get_widths2(seq):
widths = {}
r = []
for v in seq:
if isinstance(v, list):
if r:
char1 = r[-1]
for (i,(w,vx,vy)) in enumerate(choplist(3,v)):
widths[char1+i] = (w,(vx,vy))
r = []
elif isinstance(v, int):
r.append(v)
if len(r) == 5:
(char1,char2,w,vx,vy) = r
for i in xrange(char1, char2+1):
widths[i] = (w,(vx,vy))
r = []
return widths
#assert get_widths2([1]) == {}
#assert get_widths2([1,2,3,4,5]) == {1:(3,(4,5)), 2:(3,(4,5))}
#assert get_widths2([1,[2,3,4,5],6,[7,8,9]]) == {1:(2,(3,4)), 6:(7,(8,9))}
## FontMetricsDB
##
class FontMetricsDB(object):
@classmethod
def get_metrics(klass, fontname):
return FONT_METRICS[fontname]
## Type1FontHeaderParser
##
class Type1FontHeaderParser(PSStackParser):
KEYWORD_BEGIN = KWD('begin')
KEYWORD_END = KWD('end')
KEYWORD_DEF = KWD('def')
KEYWORD_PUT = KWD('put')
KEYWORD_DICT = KWD('dict')
KEYWORD_ARRAY = KWD('array')
KEYWORD_READONLY = KWD('readonly')
KEYWORD_FOR = KWD('for')
def __init__(self, data):
PSStackParser.__init__(self, data)
self._cid2unicode = {}
return
def get_encoding(self):
while 1:
try:
(cid,name) = self.nextobject()
except PSEOF:
break
try:
self._cid2unicode[cid] = name2unicode(name)
except KeyError:
pass
return self._cid2unicode
def do_keyword(self, pos, token):
if token is self.KEYWORD_PUT:
((_,key),(_,value)) = self.pop(2)
if (isinstance(key, int) and
isinstance(value, PSLiteral)):
self.add_results((key, literal_name(value)))
return
## CFFFont
## (Format specified in Adobe Technical Note: #5176
## "The Compact Font Format Specification")
##
NIBBLES = ('0','1','2','3','4','5','6','7','8','9','.','e','e-',None,'-')
def getdict(data):
d = {}
fp = StringIO(data)
stack = []
while 1:
c = fp.read(1)
if not c: break
b0 = ord(c)
if b0 <= 21:
d[b0] = stack
stack = []
continue
if b0 == 30:
s = ''
loop = True
while loop:
b = ord(fp.read(1))
for n in (b >> 4, b & 15):
if n == 15:
loop = False
else:
s += NIBBLES[n]
value = float(s)
elif 32 <= b0 and b0 <= 246:
value = b0-139
else:
b1 = ord(fp.read(1))
if 247 <= b0 and b0 <= 250:
value = ((b0-247)<<8)+b1+108
elif 251 <= b0 and b0 <= 254:
value = -((b0-251)<<8)-b1-108
else:
b2 = ord(fp.read(1))
if 128 <= b1: b1 -= 256
if b0 == 28:
value = b1<<8 | b2
else:
value = b1<<24 | b2<<16 | struct.unpack('>H', fp.read(2))[0]
stack.append(value)
return d
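# A sketch of the operand encodings handled above (hand-computed examples):
#
#     0x8b            -> 0     (32 <= b0 <= 246: value = b0 - 139)
#     0xf7 0x00       -> 108   (247 <= b0 <= 250: ((b0-247)<<8) + b1 + 108)
#     0x1c 0x01 0x00  -> 256   (b0 == 28: b1<<8 | b2)
#     0x1e 0x34 0xaf  -> 34.0  (b0 == 30: nibble-coded real number)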
class CFFFont(object):
STANDARD_STRINGS = (
'.notdef', 'space', 'exclam', 'quotedbl', 'numbersign',
'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft',
'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period',
'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six',
'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal',
'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G',
'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash',
'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a',
'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown',
'cent', 'sterling', 'fraction', 'yen', 'florin', 'section',
'currency', 'quotesingle', 'quotedblleft', 'guillemotleft',
'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash',
'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet',
'quotesinglbase', 'quotedblbase', 'quotedblright',
'guillemotright', 'ellipsis', 'perthousand', 'questiondown',
'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve',
'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut',
'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash',
'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash',
'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu',
'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn',
'onequarter', 'divide', 'brokenbar', 'degree', 'thorn',
'threequarters', 'twosuperior', 'registered', 'minus', 'eth',
'multiply', 'threesuperior', 'copyright', 'Aacute',
'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde',
'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave',
'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde',
'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde',
'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave',
'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex',
'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute',
'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex',
'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex',
'odieresis', 'ograve', 'otilde', 'scaron', 'uacute',
'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis',
'zcaron', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle',
'dollarsuperior', 'ampersandsmall', 'Acutesmall',
'parenleftsuperior', 'parenrightsuperior', 'twodotenleader',
'onedotenleader', 'zerooldstyle', 'oneoldstyle', 'twooldstyle',
'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle',
'sevenoldstyle', 'eightoldstyle', 'nineoldstyle',
'commasuperior', 'threequartersemdash', 'periodsuperior',
'questionsmall', 'asuperior', 'bsuperior', 'centsuperior',
'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior',
'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior',
'ff', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior',
'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall',
'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall',
'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall',
'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall',
'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall',
'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall',
'exclamdownsmall', 'centoldstyle', 'Lslashsmall', 'Scaronsmall',
'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall',
'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior',
'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'questiondownsmall',
'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths',
'onethird', 'twothirds', 'zerosuperior', 'foursuperior',
'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior',
'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior',
'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior',
'seveninferior', 'eightinferior', 'nineinferior',
'centinferior', 'dollarinferior', 'periodinferior',
'commainferior', 'Agravesmall', 'Aacutesmall',
'Acircumflexsmall', 'Atildesmall', 'Adieresissmall',
'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall',
'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall',
'Igravesmall', 'Iacutesmall', 'Icircumflexsmall',
'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall',
'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall',
'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall',
'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall',
'Yacutesmall', 'Thornsmall', 'Ydieresissmall', '001.000',
'001.001', '001.002', '001.003', 'Black', 'Bold', 'Book',
'Light', 'Medium', 'Regular', 'Roman', 'Semibold',
)
class INDEX(object):
def __init__(self, fp):
self.fp = fp
self.offsets = []
(count, offsize) = struct.unpack('>HB', self.fp.read(3))
for i in xrange(count+1):
self.offsets.append(nunpack(self.fp.read(offsize)))
self.base = self.fp.tell()-1
self.fp.seek(self.base+self.offsets[-1])
return
def __repr__(self):
return '<INDEX: size=%d>' % len(self)
def __len__(self):
return len(self.offsets)-1
def __getitem__(self, i):
self.fp.seek(self.base+self.offsets[i])
return self.fp.read(self.offsets[i+1]-self.offsets[i])
def __iter__(self):
return iter( self[i] for i in xrange(len(self)) )
def __init__(self, name, fp):
self.name = name
self.fp = fp
# Header
(_major,_minor,hdrsize,offsize) = struct.unpack('BBBB', self.fp.read(4))
self.fp.read(hdrsize-4)
# Name INDEX
self.name_index = self.INDEX(self.fp)
# Top DICT INDEX
self.dict_index = self.INDEX(self.fp)
# String INDEX
self.string_index = self.INDEX(self.fp)
# Global Subr INDEX
self.subr_index = self.INDEX(self.fp)
# Top DICT DATA
self.top_dict = getdict(self.dict_index[0])
(charset_pos,) = self.top_dict.get(15, [0])
(encoding_pos,) = self.top_dict.get(16, [0])
(charstring_pos,) = self.top_dict.get(17, [0])
# CharStrings
self.fp.seek(charstring_pos)
self.charstring = self.INDEX(self.fp)
self.nglyphs = len(self.charstring)
# Encodings
self.code2gid = {}
self.gid2code = {}
self.fp.seek(encoding_pos)
format = self.fp.read(1)
if format == '\x00':
# Format 0
(n,) = struct.unpack('B', self.fp.read(1))
for (code,gid) in enumerate(struct.unpack('B'*n, self.fp.read(n))):
self.code2gid[code] = gid
self.gid2code[gid] = code
elif format == '\x01':
# Format 1
(n,) = struct.unpack('B', self.fp.read(1))
code = 0
for i in xrange(n):
(first,nleft) = struct.unpack('BB', self.fp.read(2))
for gid in xrange(first,first+nleft+1):
self.code2gid[code] = gid
self.gid2code[gid] = code
code += 1
else:
raise ValueError('unsupported encoding format: %r' % format)
# Charsets
self.name2gid = {}
self.gid2name = {}
self.fp.seek(charset_pos)
format = self.fp.read(1)
if format == '\x00':
# Format 0
n = self.nglyphs-1
for (gid,sid) in enumerate(struct.unpack('>'+'H'*n, self.fp.read(2*n))):
gid += 1
name = self.getstr(sid)
self.name2gid[name] = gid
self.gid2name[gid] = name
elif format == '\x01':
# Format 1
(n,) = struct.unpack('B', self.fp.read(1))
sid = 0
for i in xrange(n):
(first,nleft) = struct.unpack('BB', self.fp.read(2))
for gid in xrange(first,first+nleft+1):
name = self.getstr(sid)
self.name2gid[name] = gid
self.gid2name[gid] = name
sid += 1
elif format == '\x02':
# Format 2
assert 0
else:
raise ValueError('unsupported charset format: %r' % format)
#print self.code2gid
#print self.name2gid
#assert 0
return
def getstr(self, sid):
if sid < len(self.STANDARD_STRINGS):
return self.STANDARD_STRINGS[sid]
return self.string_index[sid-len(self.STANDARD_STRINGS)]
## TrueTypeFont
##
class TrueTypeFont(object):
class CMapNotFound(Exception): pass
def __init__(self, name, fp):
self.name = name
self.fp = fp
self.tables = {}
self.fonttype = fp.read(4)
(ntables, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
for _ in xrange(ntables):
(name, tsum, offset, length) = struct.unpack('>4sLLL', fp.read(16))
self.tables[name] = (offset, length)
return
def create_unicode_map(self):
if 'cmap' not in self.tables:
raise TrueTypeFont.CMapNotFound
(base_offset, length) = self.tables['cmap']
fp = self.fp
fp.seek(base_offset)
(version, nsubtables) = struct.unpack('>HH', fp.read(4))
subtables = []
for i in xrange(nsubtables):
subtables.append(struct.unpack('>HHL', fp.read(8)))
char2gid = {}
# Only supports subtable type 0, 2 and 4.
for (_1, _2, st_offset) in subtables:
fp.seek(base_offset+st_offset)
(fmttype, fmtlen, fmtlang) = struct.unpack('>HHH', fp.read(6))
if fmttype == 0:
char2gid.update(enumerate(struct.unpack('>256B', fp.read(256))))
elif fmttype == 2:
subheaderkeys = struct.unpack('>256H', fp.read(512))
firstbytes = [0]*8192
for (i,k) in enumerate(subheaderkeys):
firstbytes[k/8] = i
nhdrs = max(subheaderkeys)/8 + 1
hdrs = []
for i in xrange(nhdrs):
(firstcode,entcount,delta,offset) = struct.unpack('>HHhH', fp.read(8))
hdrs.append((i,firstcode,entcount,delta,fp.tell()-2+offset))
for (i,firstcode,entcount,delta,pos) in hdrs:
if not entcount: continue
first = firstcode + (firstbytes[i] << 8)
fp.seek(pos)
for c in xrange(entcount):
gid = struct.unpack('>H', fp.read(2))
if gid:
gid += delta
char2gid[first+c] = gid
elif fmttype == 4:
(segcount, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
segcount /= 2
ecs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
fp.read(2)
scs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
idds = struct.unpack('>%dh' % segcount, fp.read(2*segcount))
pos = fp.tell()
idrs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
for (ec,sc,idd,idr) in zip(ecs, scs, idds, idrs):
if idr:
fp.seek(pos+idr)
for c in xrange(sc, ec+1):
char2gid[c] = (struct.unpack('>H', fp.read(2))[0] + idd) & 0xffff
else:
for c in xrange(sc, ec+1):
char2gid[c] = (c + idd) & 0xffff
else:
assert 0
# create unicode map
unicode_map = FileUnicodeMap()
for (char,gid) in char2gid.iteritems():
unicode_map.add_cid2unichr(gid, char)
return unicode_map
## Fonts
##
class PDFFontError(PDFException): pass
class PDFUnicodeNotDefined(PDFFontError): pass
LITERAL_STANDARD_ENCODING = LIT('StandardEncoding')
LITERAL_TYPE1C = LIT('Type1C')
# PDFFont
class PDFFont(object):
def __init__(self, descriptor, widths, default_width=None):
self.descriptor = descriptor
self.widths = widths
self.fontname = resolve1(descriptor.get('FontName', 'unknown'))
if isinstance(self.fontname, PSLiteral):
self.fontname = literal_name(self.fontname)
self.flags = int_value(descriptor.get('Flags', 0))
self.ascent = num_value(descriptor.get('Ascent', 0))
self.descent = num_value(descriptor.get('Descent', 0))
self.italic_angle = num_value(descriptor.get('ItalicAngle', 0))
self.default_width = default_width or num_value(descriptor.get('MissingWidth', 0))
self.leading = num_value(descriptor.get('Leading', 0))
self.bbox = list_value(descriptor.get('FontBBox', (0,0,0,0)))
self.hscale = self.vscale = .001
return
def __repr__(self):
return '<PDFFont>'
def is_vertical(self):
return False
def is_multibyte(self):
return False
def decode(self, bytes):
return map(ord, bytes)
def get_ascent(self):
return self.ascent * self.vscale
def get_descent(self):
return self.descent * self.vscale
def get_width(self):
w = self.bbox[2]-self.bbox[0]
if w == 0:
w = -self.default_width
return w * self.hscale
def get_height(self):
h = self.bbox[3]-self.bbox[1]
if h == 0:
h = self.ascent - self.descent
return h * self.vscale
def char_width(self, cid):
return self.widths.get(cid, self.default_width) * self.hscale
def char_disp(self, cid):
return 0
def string_width(self, s):
return sum( self.char_width(cid) for cid in self.decode(s) )
# PDFSimpleFont
class PDFSimpleFont(PDFFont):
def __init__(self, descriptor, widths, spec):
# Font encoding is specified either by a name of
# built-in encoding or a dictionary that describes
# the differences.
if 'Encoding' in spec:
encoding = resolve1(spec['Encoding'])
else:
encoding = LITERAL_STANDARD_ENCODING
if isinstance(encoding, dict):
name = literal_name(encoding.get('BaseEncoding', LITERAL_STANDARD_ENCODING))
diff = list_value(encoding.get('Differences', None))
self.cid2unicode = EncodingDB.get_encoding(name, diff)
else:
self.cid2unicode = EncodingDB.get_encoding(literal_name(encoding))
self.unicode_map = None
if 'ToUnicode' in spec:
strm = stream_value(spec['ToUnicode'])
self.unicode_map = FileUnicodeMap()
CMapParser(self.unicode_map, StringIO(strm.get_data())).run()
PDFFont.__init__(self, descriptor, widths)
return
def to_unichr(self, cid):
if self.unicode_map:
try:
return self.unicode_map.get_unichr(cid)
except KeyError:
pass
try:
return self.cid2unicode[cid]
except KeyError:
raise PDFUnicodeNotDefined(None, cid)
# PDFType1Font
class PDFType1Font(PDFSimpleFont):
def __init__(self, rsrcmgr, spec):
try:
self.basefont = literal_name(spec['BaseFont'])
except KeyError:
if STRICT:
raise PDFFontError('BaseFont is missing')
self.basefont = 'unknown'
try:
(descriptor, widths) = FontMetricsDB.get_metrics(self.basefont)
except KeyError:
descriptor = dict_value(spec.get('FontDescriptor', {}))
firstchar = int_value(spec.get('FirstChar', 0))
lastchar = int_value(spec.get('LastChar', 255))
widths = list_value(spec.get('Widths', [0]*256))
widths = dict( (i+firstchar,w) for (i,w) in enumerate(widths) )
PDFSimpleFont.__init__(self, descriptor, widths, spec)
if 'Encoding' not in spec and 'FontFile' in descriptor:
# try to recover the missing encoding info from the font file.
self.fontfile = stream_value(descriptor.get('FontFile'))
length1 = int_value(self.fontfile['Length1'])
data = self.fontfile.get_data()[:length1]
parser = Type1FontHeaderParser(StringIO(data))
self.cid2unicode = parser.get_encoding()
return
def __repr__(self):
return '<PDFType1Font: basefont=%r>' % self.basefont
# PDFTrueTypeFont
class PDFTrueTypeFont(PDFType1Font):
def __repr__(self):
return '<PDFTrueTypeFont: basefont=%r>' % self.basefont
# PDFType3Font
class PDFType3Font(PDFSimpleFont):
def __init__(self, rsrcmgr, spec):
firstchar = int_value(spec.get('FirstChar', 0))
lastchar = int_value(spec.get('LastChar', 0))
widths = list_value(spec.get('Widths', [0]*256))
widths = dict( (i+firstchar,w) for (i,w) in enumerate(widths))
if 'FontDescriptor' in spec:
descriptor = dict_value(spec['FontDescriptor'])
else:
descriptor = {'Ascent':0, 'Descent':0,
'FontBBox':spec['FontBBox']}
PDFSimpleFont.__init__(self, descriptor, widths, spec)
self.matrix = tuple(list_value(spec.get('FontMatrix')))
(_,self.descent,_,self.ascent) = self.bbox
(self.hscale,self.vscale) = apply_matrix_norm(self.matrix, (1,1))
return
def __repr__(self):
return '<PDFType3Font>'
# PDFCIDFont
class PDFCIDFont(PDFFont):
def __init__(self, rsrcmgr, spec):
try:
self.basefont = literal_name(spec['BaseFont'])
except KeyError:
if STRICT:
raise PDFFontError('BaseFont is missing')
self.basefont = 'unknown'
self.cidsysteminfo = dict_value(spec.get('CIDSystemInfo', {}))
self.cidcoding = '%s-%s' % (self.cidsysteminfo.get('Registry', 'unknown'),
self.cidsysteminfo.get('Ordering', 'unknown'))
try:
name = literal_name(spec['Encoding'])
except KeyError:
if STRICT:
raise PDFFontError('Encoding is unspecified')
name = 'unknown'
try:
self.cmap = CMapDB.get_cmap(name)
except CMapDB.CMapNotFound, e:
if STRICT:
raise PDFFontError(e)
self.cmap = CMap()
try:
descriptor = dict_value(spec['FontDescriptor'])
except KeyError:
if STRICT:
raise PDFFontError('FontDescriptor is missing')
descriptor = {}
ttf = None
if 'FontFile2' in descriptor:
self.fontfile = stream_value(descriptor.get('FontFile2'))
ttf = TrueTypeFont(self.basefont,
StringIO(self.fontfile.get_data()))
self.unicode_map = None
if 'ToUnicode' in spec:
strm = stream_value(spec['ToUnicode'])
self.unicode_map = FileUnicodeMap()
CMapParser(self.unicode_map, StringIO(strm.get_data())).run()
elif self.cidcoding == 'Adobe-Identity':
if ttf:
try:
self.unicode_map = ttf.create_unicode_map()
except TrueTypeFont.CMapNotFound:
pass
else:
try:
self.unicode_map = CMapDB.get_unicode_map(self.cidcoding, self.cmap.is_vertical())
except CMapDB.CMapNotFound, e:
pass
self.vertical = self.cmap.is_vertical()
if self.vertical:
# writing mode: vertical
widths = get_widths2(list_value(spec.get('W2', [])))
self.disps = dict( (cid,(vx,vy)) for (cid,(_,(vx,vy))) in widths.iteritems() )
(vy,w) = spec.get('DW2', [880, -1000])
self.default_disp = (None,vy)
widths = dict( (cid,w) for (cid,(w,_)) in widths.iteritems() )
default_width = w
else:
# writing mode: horizontal
self.disps = {}
self.default_disp = 0
widths = get_widths(list_value(spec.get('W', [])))
default_width = spec.get('DW', 1000)
PDFFont.__init__(self, descriptor, widths, default_width=default_width)
return
def __repr__(self):
return '<PDFCIDFont: basefont=%r, cidcoding=%r>' % (self.basefont, self.cidcoding)
def is_vertical(self):
return self.vertical
def is_multibyte(self):
return True
def decode(self, bytes):
return self.cmap.decode(bytes)
def char_disp(self, cid):
"Returns an integer for horizontal fonts, a tuple for vertical fonts."
return self.disps.get(cid, self.default_disp)
def to_unichr(self, cid):
try:
if not self.unicode_map: raise KeyError(cid)
return self.unicode_map.get_unichr(cid)
except KeyError:
raise PDFUnicodeNotDefined(self.cidcoding, cid)
# main
def main(argv):
for fname in argv[1:]:
fp = file(fname, 'rb')
#font = TrueTypeFont(fname, fp)
font = CFFFont(fname, fp)
print font
fp.close()
return
if __name__ == '__main__': sys.exit(main(sys.argv))
| gpl-3.0 | 7,434,869,200,181,841,000 | 36.707977 | 98 | 0.540478 | false |
liorvh/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/watchlist_unittest.py | 124 | 2754 | # Copyright (C) 2010 Apple Inc. All rights reserved.
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Unit tests for watchlist.py.'''
import unittest2 as unittest
import watchlist
class MockErrorHandler(object):
def __init__(self, handle_style_error):
self.turned_off_filtering = False
self._handle_style_error = handle_style_error
def turn_off_line_filtering(self):
self.turned_off_filtering = True
def __call__(self, line_number, category, confidence, message):
self._handle_style_error(self, line_number, category, confidence, message)
return True
class WatchListTest(unittest.TestCase):
def test_basic_error_message(self):
def handle_style_error(mock_error_handler, line_number, category, confidence, message):
mock_error_handler.had_error = True
self.assertEqual(0, line_number)
self.assertEqual('watchlist/general', category)
error_handler = MockErrorHandler(handle_style_error)
error_handler.had_error = False
checker = watchlist.WatchListChecker('watchlist', error_handler)
checker.check(['{"DEFINTIONS": {}}'])
self.assertTrue(error_handler.had_error)
self.assertTrue(error_handler.turned_off_filtering)
| bsd-3-clause | 366,529,192,794,524,000 | 41.369231 | 95 | 0.737473 | false |
ArtemTeleshev/raspberrypi-linux | tools/perf/scripts/python/sctop.py | 1996 | 2102 | # system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
raw_syscalls__sys_enter(**locals())
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
| gpl-2.0 | 651,098,864,098,121,100 | 25.275 | 75 | 0.657945 | false |
Conjuror/fxos-certsuite | mcts/web-platform-tests/tests/tools/pywebsocket/src/mod_pywebsocket/handshake/hybi.py | 139 | 17070 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides the opening handshake processor for the WebSocket
protocol (RFC 6455).
Specification:
http://tools.ietf.org/html/rfc6455
"""
# Note: request.connection.write is used in this module, even though mod_python
# document says that it should be used only in connection handlers.
# Unfortunately, we have no other options. For example, request.write is not
# suitable because it doesn't allow direct raw bytes writing.
import base64
import logging
import os
import re
from mod_pywebsocket import common
from mod_pywebsocket.extensions import get_extension_processor
from mod_pywebsocket.extensions import is_compression_extension
from mod_pywebsocket.handshake._base import check_request_line
from mod_pywebsocket.handshake._base import format_header
from mod_pywebsocket.handshake._base import get_mandatory_header
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import parse_token_list
from mod_pywebsocket.handshake._base import validate_mandatory_header
from mod_pywebsocket.handshake._base import validate_subprotocol
from mod_pywebsocket.handshake._base import VersionException
from mod_pywebsocket.stream import Stream
from mod_pywebsocket.stream import StreamOptions
from mod_pywebsocket import util
# Used to validate the value in the Sec-WebSocket-Key header strictly. RFC 4648
# disallows non-zero padding, so the character right before == must be any of
# A, Q, g and w.
_SEC_WEBSOCKET_KEY_REGEX = re.compile('^[+/0-9A-Za-z]{21}[AQgw]==$')
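# For example, 'dGhlIHNhbXBsZSBub25jZQ==' passes (21 base64 characters
# followed by 'Q=='), while a key ending in 'R==' would be rejected because
# its final character encodes non-zero padding bits.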
# Defining aliases for values used frequently.
_VERSION_HYBI08 = common.VERSION_HYBI08
_VERSION_HYBI08_STRING = str(_VERSION_HYBI08)
_VERSION_LATEST = common.VERSION_HYBI_LATEST
_VERSION_LATEST_STRING = str(_VERSION_LATEST)
_SUPPORTED_VERSIONS = [
_VERSION_LATEST,
_VERSION_HYBI08,
]
def compute_accept(key):
"""Computes value for the Sec-WebSocket-Accept header from value of the
Sec-WebSocket-Key header.
"""
accept_binary = util.sha1_hash(
key + common.WEBSOCKET_ACCEPT_UUID).digest()
accept = base64.b64encode(accept_binary)
return (accept, accept_binary)
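# A quick sanity check using the sample handshake from RFC 6455 section 1.3:
#
#     (accept, _) = compute_accept('dGhlIHNhbXBsZSBub25jZQ==')
#     assert accept == 's3pPLMBiTxaQ9kYGzzhZRbK+xOo='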
class Handshaker(object):
"""Opening handshake processor for the WebSocket protocol (RFC 6455)."""
def __init__(self, request, dispatcher):
"""Construct an instance.
Args:
request: mod_python request.
dispatcher: Dispatcher (dispatch.Dispatcher).
Handshaker will add attributes such as ws_resource during handshake.
"""
self._logger = util.get_class_logger(self)
self._request = request
self._dispatcher = dispatcher
def _validate_connection_header(self):
connection = get_mandatory_header(
self._request, common.CONNECTION_HEADER)
try:
connection_tokens = parse_token_list(connection)
except HandshakeException, e:
raise HandshakeException(
'Failed to parse %s: %s' % (common.CONNECTION_HEADER, e))
connection_is_valid = False
for token in connection_tokens:
if token.lower() == common.UPGRADE_CONNECTION_TYPE.lower():
connection_is_valid = True
break
if not connection_is_valid:
raise HandshakeException(
'%s header doesn\'t contain "%s"' %
(common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
def do_handshake(self):
self._request.ws_close_code = None
self._request.ws_close_reason = None
# Parsing.
check_request_line(self._request)
validate_mandatory_header(
self._request,
common.UPGRADE_HEADER,
common.WEBSOCKET_UPGRADE_TYPE)
self._validate_connection_header()
self._request.ws_resource = self._request.uri
unused_host = get_mandatory_header(self._request, common.HOST_HEADER)
self._request.ws_version = self._check_version()
# This handshake must be based on latest hybi. We are responsible to
# fallback to HTTP on handshake failure as latest hybi handshake
# specifies.
try:
self._get_origin()
self._set_protocol()
self._parse_extensions()
# Key validation, response generation.
key = self._get_key()
(accept, accept_binary) = compute_accept(key)
self._logger.debug(
'%s: %r (%s)',
common.SEC_WEBSOCKET_ACCEPT_HEADER,
accept,
util.hexify(accept_binary))
self._logger.debug('Protocol version is RFC 6455')
# Setup extension processors.
processors = []
if self._request.ws_requested_extensions is not None:
for extension_request in self._request.ws_requested_extensions:
processor = get_extension_processor(extension_request)
# Unknown extension requests are just ignored.
if processor is not None:
processors.append(processor)
self._request.ws_extension_processors = processors
# List of extra headers. The extra handshake handler may add header
# data as name/value pairs to this list and pywebsocket appends
# them to the WebSocket handshake.
self._request.extra_headers = []
# Extra handshake handler may modify/remove processors.
self._dispatcher.do_extra_handshake(self._request)
processors = filter(lambda processor: processor is not None,
self._request.ws_extension_processors)
# Ask each processor if there are extensions on the request which
# cannot co-exist. When processor decided other processors cannot
# co-exist with it, the processor marks them (or itself) as
# "inactive". The first extension processor has the right to
# make the final call.
for processor in reversed(processors):
if processor.is_active():
processor.check_consistency_with_other_processors(
processors)
processors = filter(lambda processor: processor.is_active(),
processors)
accepted_extensions = []
# We need to take into account of mux extension here.
# If mux extension exists:
# - Remove processors of extensions for logical channel,
# which are processors located before the mux processor
# - Pass extension requests for logical channel to mux processor
# - Attach the mux processor to the request. It will be referred
# by dispatcher to see whether the dispatcher should use mux
# handler or not.
mux_index = -1
for i, processor in enumerate(processors):
if processor.name() == common.MUX_EXTENSION:
mux_index = i
break
if mux_index >= 0:
logical_channel_extensions = []
for processor in processors[:mux_index]:
logical_channel_extensions.append(processor.request())
processor.set_active(False)
self._request.mux_processor = processors[mux_index]
self._request.mux_processor.set_extensions(
logical_channel_extensions)
processors = filter(lambda processor: processor.is_active(),
processors)
stream_options = StreamOptions()
for index, processor in enumerate(processors):
if not processor.is_active():
continue
extension_response = processor.get_extension_response()
if extension_response is None:
# Rejected.
continue
accepted_extensions.append(extension_response)
processor.setup_stream_options(stream_options)
if not is_compression_extension(processor.name()):
continue
# Inactivate all of the following compression extensions.
for j in xrange(index + 1, len(processors)):
if is_compression_extension(processors[j].name()):
processors[j].set_active(False)
if len(accepted_extensions) > 0:
self._request.ws_extensions = accepted_extensions
self._logger.debug(
'Extensions accepted: %r',
map(common.ExtensionParameter.name, accepted_extensions))
else:
self._request.ws_extensions = None
self._request.ws_stream = self._create_stream(stream_options)
if self._request.ws_requested_protocols is not None:
if self._request.ws_protocol is None:
raise HandshakeException(
'do_extra_handshake must choose one subprotocol from '
'ws_requested_protocols and set it to ws_protocol')
validate_subprotocol(self._request.ws_protocol)
self._logger.debug(
'Subprotocol accepted: %r',
self._request.ws_protocol)
else:
if self._request.ws_protocol is not None:
raise HandshakeException(
'ws_protocol must be None when the client didn\'t '
'request any subprotocol')
self._send_handshake(accept)
except HandshakeException, e:
if not e.status:
# Fallback to 400 bad request by default.
e.status = common.HTTP_STATUS_BAD_REQUEST
raise e
def _get_origin(self):
if self._request.ws_version is _VERSION_HYBI08:
origin_header = common.SEC_WEBSOCKET_ORIGIN_HEADER
else:
origin_header = common.ORIGIN_HEADER
origin = self._request.headers_in.get(origin_header)
if origin is None:
self._logger.debug('Client request does not have origin header')
self._request.ws_origin = origin
def _check_version(self):
version = get_mandatory_header(self._request,
common.SEC_WEBSOCKET_VERSION_HEADER)
if version == _VERSION_HYBI08_STRING:
return _VERSION_HYBI08
if version == _VERSION_LATEST_STRING:
return _VERSION_LATEST
if version.find(',') >= 0:
raise HandshakeException(
'Multiple versions (%r) are not allowed for header %s' %
(version, common.SEC_WEBSOCKET_VERSION_HEADER),
status=common.HTTP_STATUS_BAD_REQUEST)
raise VersionException(
'Unsupported version %r for header %s' %
(version, common.SEC_WEBSOCKET_VERSION_HEADER),
supported_versions=', '.join(map(str, _SUPPORTED_VERSIONS)))
def _set_protocol(self):
self._request.ws_protocol = None
protocol_header = self._request.headers_in.get(
common.SEC_WEBSOCKET_PROTOCOL_HEADER)
if protocol_header is None:
self._request.ws_requested_protocols = None
return
self._request.ws_requested_protocols = parse_token_list(
protocol_header)
self._logger.debug('Subprotocols requested: %r',
self._request.ws_requested_protocols)
def _parse_extensions(self):
extensions_header = self._request.headers_in.get(
common.SEC_WEBSOCKET_EXTENSIONS_HEADER)
if not extensions_header:
self._request.ws_requested_extensions = None
return
if self._request.ws_version is common.VERSION_HYBI08:
            allow_quoted_string = False
        else:
            allow_quoted_string = True
try:
self._request.ws_requested_extensions = common.parse_extensions(
extensions_header, allow_quoted_string=allow_quoted_string)
except common.ExtensionParsingException, e:
raise HandshakeException(
'Failed to parse Sec-WebSocket-Extensions header: %r' % e)
self._logger.debug(
'Extensions requested: %r',
map(common.ExtensionParameter.name,
self._request.ws_requested_extensions))
def _validate_key(self, key):
if key.find(',') >= 0:
raise HandshakeException('Request has multiple %s header lines or '
'contains illegal character \',\': %r' %
(common.SEC_WEBSOCKET_KEY_HEADER, key))
# Validate
key_is_valid = False
try:
# Validate key by quick regex match before parsing by base64
# module. Because base64 module skips invalid characters, we have
# to do this in advance to make this server strictly reject illegal
# keys.
if _SEC_WEBSOCKET_KEY_REGEX.match(key):
decoded_key = base64.b64decode(key)
if len(decoded_key) == 16:
key_is_valid = True
except TypeError, e:
pass
if not key_is_valid:
raise HandshakeException(
'Illegal value for header %s: %r' %
(common.SEC_WEBSOCKET_KEY_HEADER, key))
return decoded_key
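# Illustrative note (not part of the original module): a key that passes
# the check above is the base64 encoding of exactly 16 random bytes, which
# is always 24 characters ending in '=='. For example:
#
#   import base64, os
#   key = base64.b64encode(os.urandom(16))  # e.g. 'dGhlIHNhbXBsZSBub25jZQ=='
#   assert len(base64.b64decode(key)) == 16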
def _get_key(self):
key = get_mandatory_header(
self._request, common.SEC_WEBSOCKET_KEY_HEADER)
decoded_key = self._validate_key(key)
self._logger.debug(
'%s: %r (%s)',
common.SEC_WEBSOCKET_KEY_HEADER,
key,
util.hexify(decoded_key))
return key
def _create_stream(self, stream_options):
return Stream(self._request, stream_options)
def _create_handshake_response(self, accept):
response = []
response.append('HTTP/1.1 101 Switching Protocols\r\n')
# WebSocket headers
response.append(format_header(
common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE))
response.append(format_header(
common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
response.append(format_header(
common.SEC_WEBSOCKET_ACCEPT_HEADER, accept))
if self._request.ws_protocol is not None:
response.append(format_header(
common.SEC_WEBSOCKET_PROTOCOL_HEADER,
self._request.ws_protocol))
if (self._request.ws_extensions is not None and
len(self._request.ws_extensions) != 0):
response.append(format_header(
common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
common.format_extensions(self._request.ws_extensions)))
# Headers not specific for WebSocket
for name, value in self._request.extra_headers:
response.append(format_header(name, value))
response.append('\r\n')
return ''.join(response)
def _send_handshake(self, accept):
raw_response = self._create_handshake_response(accept)
self._request.connection.write(raw_response)
self._logger.debug('Sent server\'s opening handshake: %r',
raw_response)
# vi:sts=4 sw=4 et
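# A minimal illustrative sketch (the constant below comes from RFC 6455,
# not from this module): the `accept` argument threaded through
# _send_handshake() above is derived from the client's Sec-WebSocket-Key
# as base64(SHA-1(key + GUID)).
import base64
import hashlib

def _example_compute_accept(key):
    # The '_example_' prefix marks this as an illustration, not module API.
    guid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
    return base64.b64encode(hashlib.sha1(key + guid).digest())

# _example_compute_accept('dGhlIHNhbXBsZSBub25jZQ==') returns
# 's3pPLMBiTxaQ9kYGzzhZRbK+xOo=' (the RFC 6455 example key/accept pair).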
| mpl-2.0 | 1,322,489,892,581,806,000 | 38.241379 | 79 | 0.615993 | false |
openstack/nova | nova/virt/storage_users.py | 9 | 3738 | # Copyright 2012 Michael Still and Canonical Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from nova import utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
TWENTY_FOUR_HOURS = 3600 * 24
# NOTE(morganfainberg): Due to circular import dependencies, the use of the
# CONF.instances_path needs to be wrapped so that it can be resolved at the
# appropriate time. Because compute.manager imports this file, we end up in
# a rather ugly dependency loop without moving this into a wrapped function.
# This issue mostly stems from the use of a decorator for the lock
# synchronize and the implications of how decorators wrap the wrapped function
# or method. If this needs to be used outside of compute.manager, it should
# be refactored to eliminate this circular dependency loop.
# config option import is avoided here since it is
# explicitly imported from compute.manager and may cause issues with
# defining options after config has been processed with the
# wrapped-function style used here.
def register_storage_use(storage_path, hostname):
"""Register this host as a user of this instance storage path."""
LOCK_PATH = os.path.join(CONF.instances_path, 'locks')
@utils.synchronized('storage-registry-lock', external=True,
lock_path=LOCK_PATH)
def do_register_storage_use(storage_path, hostname):
# NOTE(mikal): this is required to determine if the instance storage is
# shared, which is something that the image cache manager needs to
# know. I can imagine other uses as well though.
d = {}
id_path = os.path.join(storage_path, 'compute_nodes')
if os.path.exists(id_path):
with open(id_path) as f:
try:
d = jsonutils.loads(f.read())
except ValueError:
LOG.warning("Cannot decode JSON from %(id_path)s",
{"id_path": id_path})
d[hostname] = time.time()
with open(id_path, 'w') as f:
f.write(jsonutils.dumps(d))
return do_register_storage_use(storage_path, hostname)
def get_storage_users(storage_path):
"""Get a list of all the users of this storage path."""
# See comments above method register_storage_use
LOCK_PATH = os.path.join(CONF.instances_path, 'locks')
@utils.synchronized('storage-registry-lock', external=True,
lock_path=LOCK_PATH)
def do_get_storage_users(storage_path):
d = {}
id_path = os.path.join(storage_path, 'compute_nodes')
if os.path.exists(id_path):
with open(id_path) as f:
try:
d = jsonutils.loads(f.read())
except ValueError:
LOG.warning("Cannot decode JSON from %(id_path)s",
{"id_path": id_path})
recent_users = []
for node in d:
if time.time() - d[node] < TWENTY_FOUR_HOURS:
recent_users.append(node)
return recent_users
return do_get_storage_users(storage_path)
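# Illustrative usage sketch (the path and hostnames are hypothetical, not
# part of the original module). register_storage_use() maintains a JSON
# mapping of hostname -> last-seen timestamp in <storage_path>/compute_nodes,
# for example:
#   {"compute-01": 1386000000.0, "compute-02": 1386003600.0}
# get_storage_users() then returns only the hosts seen in the last 24 hours:
#
#   register_storage_use('/var/lib/nova/instances', 'compute-01')
#   recent = get_storage_users('/var/lib/nova/instances')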
| apache-2.0 | 7,174,028,137,702,570,000 | 35.647059 | 79 | 0.651953 | false |
google-code/android-scripting | python/src/Demo/pysvr/pysvr.py | 51 | 3430 | #! /usr/bin/env python
"""A multi-threaded telnet-like server that gives a Python prompt.
This is really a prototype for the same thing in C.
Usage: pysvr.py [port]
For security reasons, it only accepts requests from the current host.
This can still be insecure, but restricts violations from people who
can log in on your machine. Use with caution!
"""
import sys, os, string, getopt, thread, socket, traceback
PORT = 4000 # Default port
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "")
if len(args) > 1:
raise getopt.error, "Too many arguments."
except getopt.error, msg:
usage(msg)
for o, a in opts:
pass
if args:
try:
port = string.atoi(args[0])
except ValueError, msg:
usage(msg)
else:
port = PORT
main_thread(port)
def usage(msg=None):
sys.stdout = sys.stderr
if msg:
print msg
print "\n", __doc__,
sys.exit(2)
def main_thread(port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("", port))
sock.listen(5)
print "Listening on port", port, "..."
while 1:
(conn, addr) = sock.accept()
if addr[0] != conn.getsockname()[0]:
conn.close()
print "Refusing connection from non-local host", addr[0], "."
continue
thread.start_new_thread(service_thread, (conn, addr))
del conn, addr
def service_thread(conn, addr):
(caddr, cport) = addr
print "Thread %s has connection from %s.\n" % (str(thread.get_ident()),
caddr),
stdin = conn.makefile("r")
stdout = conn.makefile("w", 0)
run_interpreter(stdin, stdout)
print "Thread %s is done.\n" % str(thread.get_ident()),
def run_interpreter(stdin, stdout):
globals = {}
try:
str(sys.ps1)
except:
sys.ps1 = ">>> "
source = ""
while 1:
stdout.write(sys.ps1)
line = stdin.readline()
if line[:2] == '\377\354':
line = ""
if not line and not source:
break
if line[-2:] == '\r\n':
line = line[:-2] + '\n'
source = source + line
try:
code = compile_command(source)
except SyntaxError, err:
source = ""
traceback.print_exception(SyntaxError, err, None, file=stdout)
continue
if not code:
continue
source = ""
try:
run_command(code, stdin, stdout, globals)
except SystemExit, how:
if how:
try:
how = str(how)
except:
how = ""
stdout.write("Exit %s\n" % how)
break
stdout.write("\nGoodbye.\n")
def run_command(code, stdin, stdout, globals):
save = sys.stdin, sys.stdout, sys.stderr
try:
sys.stdout = sys.stderr = stdout
sys.stdin = stdin
try:
exec code in globals
except SystemExit, how:
raise SystemExit, how, sys.exc_info()[2]
except:
type, value, tb = sys.exc_info()
if tb: tb = tb.tb_next
traceback.print_exception(type, value, tb)
del tb
finally:
sys.stdin, sys.stdout, sys.stderr = save
from code import compile_command
main()
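# Minimal client sketch (illustrative; the port is the module default and
# the expression is arbitrary). The server only accepts connections from
# the local host, so connect locally:
#
#   import socket
#   s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   s.connect(('localhost', 4000))
#   s.sendall('1 + 1\r\n')
#   print s.recv(1024)   # the prompt followed by '2'
#   s.close()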
| apache-2.0 | -2,561,494,151,521,315,000 | 26.66129 | 75 | 0.53207 | false |
aparrish/cashclones | generate.py | 1 | 10326 | import rdflib
from rdflib import URIRef
from rdflib.namespace import RDFS
from jinja2 import Template
import random
import urllib
import json
import time
import re
from altuniverse import alternate_universe
def get_random_class(g):
return random.choice(list(g.subjects(RDFS.subClassOf, None)))
def get_label_string(g, thing):
return g.preferredLabel(thing, lang="en")[0][1]
def get_property(subj, prop):
query = """
SELECT ?prop
WHERE { <%s> %s ?prop }
""" % (subj, prop)
qstr = urllib.urlencode(
{'query': query, 'output': 'json', 'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
if len(obj['results']['bindings']) > 0:
return obj['results']['bindings'][0]['prop']['value']
else:
return None
def schema_convert(url, val):
from dateutil.parser import parse
months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep",
"Oct", "Nov", "Dec"]
if url == "http://www.w3.org/2001/XMLSchema#date":
val = re.sub("\+.*$", "", val)
dt = parse(val)
retval = "%s %d, %d" % (months[dt.month-1], dt.day, dt.year)
elif url == "http://www.w3.org/2001/XMLSchema#gYear":
val = re.sub("\+.*$", "", val)
dt = parse(val)
retval = str(dt.year)
elif url == "http://www.w3.org/2001/XMLSchema#gMonthDay":
val = re.sub("\+.*$", "", val)
dt = parse(val)
retval = "%s %d" % (months[dt.month-1], dt.day)
else:
retval = val
return retval
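# Illustrative conversions (assuming dateutil parses the trimmed values):
#   schema_convert('http://www.w3.org/2001/XMLSchema#date', '1984-06-07+02:00')
#       -> 'Jun 7, 1984'
#   schema_convert('http://www.w3.org/2001/XMLSchema#gYear', '1984+02:00')
#       -> '1984'
# Unknown datatypes fall through and the raw value is returned unchanged.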
def get_random_property(subj):
filter_terms = ["ID","id","Id","image","Image","gray","dorlands","wiki",
"lat","long","color","info","Info","homepage","map","Map",
"updated","Updated","logo","Logo","pushpin","label","Label",
"photo","Photo"]
query = """
SELECT ?property ?propertyLabel ?propertyVal ?propertyValLabel
WHERE {
<%s> ?property ?propertyVal.
?property rdfs:label ?propertyLabel.
FILTER(lang(?propertyLabel) = "en").
OPTIONAL {
?propertyVal rdfs:label ?propertyValLabel.
FILTER(lang(?propertyValLabel) = "en").
}
FILTER(regex(?property, "..")).
FILTER(!regex(?property, "(%s)")).
FILTER(?property != <http://dbpedia.org/ontology/wikiPageRevisionID>).
FILTER(?property != <http://dbpedia.org/ontology/wikiPageID>).
FILTER(?property != <http://dbpedia.org/ontology/abstract>).
FILTER(?property != <http://dbpedia.org/ontology/wikiPageExternalLink>).
FILTER(?property != <http://dbpedia.org/ontology/filename>).
FILTER(?property != <http://dbpedia.org/property/imageSize>).
FILTER(?property != <http://dbpedia.org/property/imagesize>).
FILTER(?property != <http://dbpedia.org/property/logoImage>).
FILTER(?property != <http://dbpedia.org/property/webpage>).
FILTER(?property != <http://dbpedia.org/property/name>).
FILTER(?property != <http://dbpedia.org/property/image>).
FILTER(?property != <http://dbpedia.org/ontology/thumbnail>).
FILTER(?property != <http://dbpedia.org/property/graypage>).
FILTER(?property != <http://dbpedia.org/ontology/grayPage>).
FILTER(?property != <http://dbpedia.org/property/imageCaption>).
FILTER(?property != <http://dbpedia.org/property/id>).
FILTER(?property != <http://dbpedia.org/property/photo>).
FILTER(?property != <http://dbpedia.org/property/caption>).
FILTER(?property != <http://dbpedia.org/ontology/graySubject>).
FILTER(?property != <http://dbpedia.org/property/graysubject>).
FILTER(?property != <http://dbpedia.org/property/website>).
FILTER(?property != <http://dbpedia.org/property/imageName>).
FILTER(?property != <http://dbpedia.org/ontology/dorlandsSuffix>).
FILTER(?property != <http://dbpedia.org/property/dorlandssuf>).
FILTER(?property != <http://dbpedia.org/property/signature>).
FILTER(?property != <http://dbpedia.org/ontology/viafId>).
FILTER(?property != <http://dbpedia.org/property/pixels>).
FILTER(?property != <http://dbpedia.org/property/mapCaption>).
FILTER(?property != <http://dbpedia.org/property/picture>).
FILTER(?property != <http://dbpedia.org/property/imageFlag>).
FILTER(?property != <http://dbpedia.org/property/neurolexid>).
FILTER(?property != <http://dbpedia.org/property/gnd>).
FILTER(?property != <http://dbpedia.org/ontology/dorlandsPrefix>).
FILTER(?property != <http://dbpedia.org/property/dorlandspre>).
FILTER(?property != <http://dbpedia.org/property/imageWidth>).
FILTER(?property != <http://dbpedia.org/property/verifiedrevid>).
}
""" % (subj, '|'.join(filter_terms))
qstr = urllib.urlencode({'query': query, 'output': 'json',
'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
properties = dict()
for prop in obj['results']['bindings']:
purl = prop['property']['value']
plabel = prop['propertyLabel']['value']
if 'propertyValLabel' in prop:
pval = prop['propertyValLabel']['value']
else:
pval = schema_convert(prop['propertyVal'].get('datatype', ''),
prop['propertyVal']['value'])
if pval.startswith("List of"): continue
if plabel not in properties:
properties[(purl, plabel)] = set()
properties[(purl, plabel)].add(pval)
chosen = random.choice(properties.items())
return {'url': chosen[0][0], 'label': chosen[0][1],
'value': random.choice(list(chosen[1]))}
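# The returned dict has this shape (the values shown are hypothetical):
#   {'url': 'http://dbpedia.org/ontology/birthDate',
#    'label': 'birth date',
#    'value': 'Jun 7, 1984'}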
def get_random_neighboring_property(subj, prop):
query = """
SELECT DISTINCT ?t ?tlabel
WHERE {
<%s> <http://purl.org/dc/terms/subject> ?val.
?s ?prop ?val.
?s <%s> ?t.
OPTIONAL {
?t rdfs:label ?tlabel.
FILTER(lang(?tlabel) = "en").
}
}
limit 1000""" % (subj, prop)
qstr = urllib.urlencode({'query': query, 'output': 'json',
'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
alternates = list()
for prop in obj['results']['bindings']:
if 'tlabel' in prop:
if prop['tlabel']['value'].startswith("List of"): continue
alternates.append(prop['tlabel']['value'])
else:
val = schema_convert(prop['t'].get('datatype', ''),
prop['t']['value'])
alternates.append(val)
return random.choice(alternates)
cache = {}
def get_subject_count(class_):
if class_ in cache:
return cache[class_]
query = """
SELECT count(*) WHERE {
?subject rdf:type <%s>.
?subject rdfs:label ?label.
?subject foaf:name ?name.
FILTER(lang(?label) = "en").
}
""" % class_
qstr = urllib.urlencode({'query': query, 'output': 'json',
'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
result = int(obj['results']['bindings'][0]['callret-0']['value'])
cache[class_] = result
return result
def get_random_subject(class_, count):
query = """
SELECT * WHERE {
?subject rdf:type <%s>.
?subject rdfs:label ?label.
?subject foaf:name ?name.
FILTER(lang(?label) = "en").
FILTER(!STRSTARTS(?label, "List of")).
FILTER EXISTS {?subject foaf:depiction ?url}
}
offset %d
limit 1""" % (class_, random.randrange(count))
qstr = urllib.urlencode({'query': query, 'output': 'json',
'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
info = dict([(k, v['value']) for k, v \
in obj['results']['bindings'][0].iteritems() \
if not v['value'].startswith("List of")])
return info
def get_random_resource(g):
while True:
class_ = get_random_class(g)
class_str = get_label_string(g, class_)
count = get_subject_count(class_)
if count > 0:
try:
return get_random_subject(class_, count)
except IndexError as e:
continue
def get_subj_from_wikilink(href):
query = """
SELECT * WHERE {
?subject rdfs:label ?label.
?subject foaf:isPrimaryTopicOf <%s>.
FILTER(lang(?label) = "en").
}
""" % href
qstr = urllib.urlencode({'query': query, 'output': 'json',
'default-graph-uri': 'http://dbpedia.org'})
resp = urllib.urlopen("http://dbpedia.org/sparql?" + qstr)
obj = json.loads(resp.read())
try:
info = dict([(k, v['value']) for k, v \
in obj['results']['bindings'][0].iteritems() \
if not v['value'].startswith("List of")])
except IndexError:
return None
return info
def generate(subj=None):
g = rdflib.Graph()
g.parse("dbpedia_3.9.owl")
while True:
if subj is None:
subj = get_random_resource(g)
try:
prop_dict = get_random_property(subj['subject'])
alt_prop = get_random_neighboring_property(subj['subject'],
prop_dict['url']).strip()
except IndexError as e:
continue
real_prop = prop_dict['value'].strip()
if real_prop.startswith('http') or alt_prop.startswith('http'):
continue
if real_prop.lower() == alt_prop.lower(): continue
output = alternate_universe(subj['label'], prop_dict['label'],
real_prop, alt_prop)
output = output.replace("\n", "")
if len(output) <= 115:
return output + " " + get_property(subj['subject'],
'foaf:isPrimaryTopicOf')
elif len(output) <= 140:
return output
if __name__ == '__main__':
pool = [s.strip() for s in open("pool.txt").readlines()]
while True:
print "---"
if random.randrange(4) > 0:
subj = get_subj_from_wikilink('http://en.wikipedia.org' + random.choice(pool))
print generate(subj)
else:
print generate()
time.sleep(1)
| mit | 5,967,689,754,401,846,000 | 37.674157 | 90 | 0.582994 | false |
PerilousApricot/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Tool/wix.py | 61 | 3563 | """SCons.Tool.wix
Tool-specific initialization for wix, the Windows Installer XML Tool.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/wix.py 5134 2010/08/16 23:02:40 bdeegan"
import SCons.Builder
import SCons.Action
import os
def generate(env):
"""Add Builders and construction variables for WiX to an Environment."""
if not exists(env):
return
env['WIXCANDLEFLAGS'] = ['-nologo']
env['WIXCANDLEINCLUDE'] = []
env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'
env['WIXLIGHTFLAGS'].append('-nologo')
env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}"
object_builder = SCons.Builder.Builder(
action = '$WIXCANDLECOM',
suffix = '.wxiobj',
src_suffix = '.wxs')
linker_builder = SCons.Builder.Builder(
action = '$WIXLIGHTCOM',
src_suffix = '.wxiobj',
src_builder = object_builder)
env['BUILDERS']['WiX'] = linker_builder
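# Illustrative SConstruct usage (file names are hypothetical): with the
# tool loaded, .wxs sources are compiled to .wxiobj objects via candle.exe
# and then linked into an installer with light.exe.
#
#   env = Environment(tools=['default', 'wix'])
#   env.WiX('installer.msi', ['product.wxs'])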
def exists(env):
env['WIXCANDLE'] = 'candle.exe'
env['WIXLIGHT'] = 'light.exe'
# try to find the candle.exe and light.exe tools and
# add the install directory to light libpath.
for path in os.environ['PATH'].split(os.pathsep):
if not path:
continue
# workaround for some weird python win32 bug.
if path[0] == '"' and path[-1:]=='"':
path = path[1:-1]
# normalize the path
path = os.path.normpath(path)
# search for the tools in the PATH environment variable
try:
if env['WIXCANDLE'] in os.listdir(path) and \
env['WIXLIGHT'] in os.listdir(path):
env.PrependENVPath('PATH', path)
env['WIXLIGHTFLAGS'] = [ os.path.join( path, 'wixui.wixlib' ),
'-loc',
os.path.join( path, 'WixUI_en-us.wxl' ) ]
return 1
except OSError:
pass # ignore this, could be a stale PATH entry.
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 | -4,882,959,222,670,390,000 | 34.989899 | 98 | 0.649453 | false |
lizardsystem/lizard-wbconfiguration | lizard_wbconfiguration/migrations/0003_auto__del_field_dbfconfiguration_filepath__add_field_dbfconfiguration_.py | 1 | 35057 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'DBFConfiguration.filepath'
db.delete_column('lizard_wbconfiguration_dbfconfiguration', 'filepath')
# Adding field 'DBFConfiguration.owner'
db.add_column('lizard_wbconfiguration_dbfconfiguration', 'owner', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True), keep_default=False)
# Adding field 'DBFConfiguration.save_to'
db.add_column('lizard_wbconfiguration_dbfconfiguration', 'save_to', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Adding field 'DBFConfiguration.filepath'
db.add_column('lizard_wbconfiguration_dbfconfiguration', 'filepath', self.gf('django.db.models.fields.files.FileField')(default=datetime.date(2011, 12, 8), max_length=100), keep_default=False)
# Deleting field 'DBFConfiguration.owner'
db.delete_column('lizard_wbconfiguration_dbfconfiguration', 'owner')
# Deleting field 'DBFConfiguration.save_to'
db.delete_column('lizard_wbconfiguration_dbfconfiguration', 'save_to')
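# Illustrative usage (standard South commands, not part of this file):
# apply with `./manage.py migrate lizard_wbconfiguration 0003` and revert
# with `./manage.py migrate lizard_wbconfiguration 0002`.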
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lizard_area.area': {
'Meta': {'ordering': "('name',)", 'object_name': 'Area', '_ormbases': ['lizard_area.Communique']},
'area_class': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'communique_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_area.Communique']", 'unique': 'True', 'primary_key': 'True'}),
'data_administrator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.DataAdministrator']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Area']", 'null': 'True', 'blank': 'True'})
},
'lizard_area.areacode': {
'Meta': {'object_name': 'AreaCode'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.areatype': {
'Meta': {'object_name': 'AreaType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.basin': {
'Meta': {'object_name': 'Basin'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.communique': {
'Meta': {'object_name': 'Communique', '_ormbases': ['lizard_geo.GeoObject']},
'area_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.AreaType']", 'null': 'True', 'blank': 'True'}),
'basin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Basin']", 'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.AreaCode']", 'null': 'True', 'blank': 'True'}),
'geoobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_geo.GeoObject']", 'unique': 'True', 'primary_key': 'True'}),
'municipality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Municipality']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Province']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Status']", 'null': 'True', 'blank': 'True'}),
'watermanagementarea': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.WaterManagementArea']", 'null': 'True', 'blank': 'True'})
},
'lizard_area.dataadministrator': {
'Meta': {'object_name': 'DataAdministrator'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.municipality': {
'Meta': {'object_name': 'Municipality'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.province': {
'Meta': {'object_name': 'Province'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.status': {
'Meta': {'object_name': 'Status'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_area.watermanagementarea': {
'Meta': {'object_name': 'WaterManagementArea'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_fewsnorm.fewsnormsource': {
'Meta': {'object_name': 'FewsNormSource'},
'database_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'lizard_fewsnorm.geolocationcache': {
'Meta': {'ordering': "('ident', 'name')", 'object_name': 'GeoLocationCache', '_ormbases': ['lizard_geo.GeoObject']},
'fews_norm_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.FewsNormSource']"}),
'geoobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_geo.GeoObject']", 'unique': 'True', 'primary_key': 'True'}),
'icon': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'module': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_fewsnorm.ModuleCache']", 'null': 'True', 'through': "orm['lizard_fewsnorm.TimeSeriesCache']", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_fewsnorm.ParameterCache']", 'null': 'True', 'through': "orm['lizard_fewsnorm.TimeSeriesCache']", 'blank': 'True'}),
'shortname': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'timestep': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_fewsnorm.TimeStepCache']", 'null': 'True', 'through': "orm['lizard_fewsnorm.TimeSeriesCache']", 'blank': 'True'}),
'tooltip': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'lizard_fewsnorm.modulecache': {
'Meta': {'ordering': "('ident',)", 'object_name': 'ModuleCache'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'lizard_fewsnorm.parametercache': {
'Meta': {'ordering': "('ident',)", 'object_name': 'ParameterCache'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'lizard_fewsnorm.timeseriescache': {
'Meta': {'object_name': 'TimeSeriesCache'},
'geolocationcache': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.GeoLocationCache']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modulecache': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.ModuleCache']"}),
'parametercache': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.ParameterCache']"}),
'timestepcache': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.TimeStepCache']"})
},
'lizard_fewsnorm.timestepcache': {
'Meta': {'ordering': "('ident',)", 'object_name': 'TimeStepCache'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'lizard_geo.geoobject': {
'Meta': {'object_name': 'GeoObject'},
'geo_object_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_geo.GeoObjectGroup']"}),
'geometry': ('django.contrib.gis.db.models.fields.GeometryField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'lizard_geo.geoobjectgroup': {
'Meta': {'object_name': 'GeoObjectGroup'},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'source_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'lizard_wbconfiguration.areaconfiguration': {
'Meta': {'object_name': 'AreaConfiguration'},
'area': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_area.Area']", 'unique': 'True'}),
'bottom_height': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'concentr_chloride_precipitation': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'concentr_chloride_seepage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'herfstp': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'incr_concentr_nitrogyn_precipitation': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_nitrogyn_seepage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_phosphate_precipitation': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_phosphate_seepage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'ini_con_cl': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'init_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'kwel': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'kwel_is_ts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lentep': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'marge_bov': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'marge_ond': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'max_intake': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'max_outtake': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_nitrogyn_precipitation': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_nitrogyn_seepage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_phopshate_seepage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_phosphate_precipitation': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'nutc_inc_1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_inc_2': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_inc_3': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_inc_4': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_min_1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_min_2': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_min_3': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'nutc_min_4': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'peilh_issp': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sp_is_ts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_dt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_hp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_lp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_wp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_zp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'surface': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '1', 'blank': 'True'}),
'ts_concentr_chloride_1': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_concentr_chloride_1'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_concentr_chloride_2': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_concentr_chloride_2'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_evaporation': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_evaporation'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_kwel': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_kwel'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_precipitation': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_precipitation'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_sp': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sp'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_water_level': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_water_level'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_wegz': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_wegz'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'wegz': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'wegz_is_ts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'winterp': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'zomerp': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'})
},
'lizard_wbconfiguration.areafield': {
'Meta': {'object_name': 'AreaField'},
'app_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'primary_key': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lizard_wbconfiguration.areagridconfiguration': {
'Meta': {'object_name': 'AreaGridConfiguration'},
'app_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'lizard_wbconfiguration.areagridfieldconfiguration': {
'Meta': {'object_name': 'AreaGridFieldConfiguration'},
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'editable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field_name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wbconfiguration.AreaField']", 'max_length': '128'}),
'field_type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'grid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wbconfiguration.AreaGridConfiguration']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sequence': ('django.db.models.fields.IntegerField', [], {}),
'ts_parameter': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'lizard_wbconfiguration.bucket': {
'Meta': {'ordering': "['id']", 'object_name': 'Bucket'},
'area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wbconfiguration.AreaConfiguration']"}),
'bottom_crop_evaporation_factor': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_drainage_fraction': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_indraft_fraction': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_init_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_max_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_min_crop_evaporation_factor': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_min_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bottom_porosity': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'bucket_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wbconfiguration.BucketsType']", 'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'concentr_chloride_drainage_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'concentr_chloride_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'crop_evaporation_factor': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'drainage_fraction': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'incr_concentr_nitrogen_drainage_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_nitrogen_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_phosphate_drainage_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_phosphate_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'indraft_fraction': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'init_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'is_computed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'kwelwegz': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'kwelwegz_is_ts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'label_drainaige_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'label_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'man_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_nitrogen_drainage_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_nitrogen_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_phosphate_drainage_indraft': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_phosphate_flow_off': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_crop_evaporation_factor': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_water_level': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'porosity': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'replace_impact_by_nutricalc': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'surface': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '1', 'blank': 'True'}),
'ts_drainageindraft': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_drainageindraf_bucket'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_flowoff': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_flowoff_bucket'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_kwelwegz': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_kwelwegz_bucket'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"}),
'ts_referenceoverflow': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_referenceoverflow_bucket'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"})
},
'lizard_wbconfiguration.bucketstype': {
'Meta': {'object_name': 'BucketsType'},
'bucket_type': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'lizard_wbconfiguration.dbfconfiguration': {
'Meta': {'object_name': 'DBFConfiguration'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'save_to': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'lizard_wbconfiguration.parametermapping': {
'Meta': {'object_name': 'ParameterMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident_wbconfiguration': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'parametercache': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.ParameterCache']"})
},
'lizard_wbconfiguration.structure': {
'Meta': {'ordering': "['id']", 'object_name': 'Structure'},
'area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wbconfiguration.AreaConfiguration']"}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'concentr_chloride': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'deb_is_ts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deb_wint': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'deb_zomer': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_out': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'incr_concentr_nitrogen': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'incr_concentr_phosphate': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'is_computed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'min_concentr_nitrogen': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'min_concentr_phosphate': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '3', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'ts_debiet': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ts_debiet'", 'null': 'True', 'to': "orm['lizard_fewsnorm.TimeSeriesCache']"})
},
'lizard_wbconfiguration.wbconfigurationdbfmapping': {
'Meta': {'ordering': "['id']", 'object_name': 'WBConfigurationDBFMapping'},
'dbffield_decimals': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dbffield_length': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dbffield_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'dbffield_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'wbfield_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
complete_apps = ['lizard_wbconfiguration']
| gpl-3.0 | -7,715,278,348,081,904,000 | 95.31044 | 240 | 0.568816 | false |
boomsbloom/dtm-fmri | DTM/for_gensim/lib/python2.7/site-packages/boto/gs/cors.py | 153 | 7717 | # Copyright 2012 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import types
from boto.gs.user import User
from boto.exception import InvalidCorsError
from xml.sax import handler
# Relevant tags for the CORS XML document.
CORS_CONFIG = 'CorsConfig'
CORS = 'Cors'
ORIGINS = 'Origins'
ORIGIN = 'Origin'
METHODS = 'Methods'
METHOD = 'Method'
HEADERS = 'ResponseHeaders'
HEADER = 'ResponseHeader'
MAXAGESEC = 'MaxAgeSec'
class Cors(handler.ContentHandler):
"""Encapsulates the CORS configuration XML document"""
def __init__(self):
# List of CORS elements found within a CorsConfig element.
self.cors = []
# List of collections (e.g. Methods, ResponseHeaders, Origins)
# found within a CORS element. We use a list of lists here
# instead of a dictionary because the collections need to be
# preserved in the order in which they appear in the input XML
# document (and Python dictionary keys are inherently unordered).
# The elements on this list are two element tuples of the form
# (collection name, [list of collection contents]).
self.collections = []
# Lists of elements within a collection. Again a list is needed to
# preserve ordering but also because the same element may appear
# multiple times within a collection.
self.elements = []
# Dictionary mapping supported collection names to element types
# which may be contained within each.
self.legal_collections = {
ORIGINS : [ORIGIN],
METHODS : [METHOD],
HEADERS : [HEADER],
MAXAGESEC: []
}
# List of supported element types within any collection, used for
# checking validity of a parsed element name.
self.legal_elements = [ORIGIN, METHOD, HEADER]
self.parse_level = 0
self.collection = None
self.element = None
def validateParseLevel(self, tag, level):
"""Verify parse level for a given tag."""
if self.parse_level != level:
raise InvalidCorsError('Invalid tag %s at parse level %d: ' %
(tag, self.parse_level))
def startElement(self, name, attrs, connection):
"""SAX XML logic for parsing new element found."""
if name == CORS_CONFIG:
self.validateParseLevel(name, 0)
self.parse_level += 1
elif name == CORS:
self.validateParseLevel(name, 1)
self.parse_level += 1
elif name in self.legal_collections:
self.validateParseLevel(name, 2)
self.parse_level += 1
self.collection = name
elif name in self.legal_elements:
self.validateParseLevel(name, 3)
# Make sure this tag is found inside a collection tag.
if self.collection is None:
raise InvalidCorsError('Tag %s found outside collection' % name)
# Make sure this tag is allowed for the current collection tag.
if name not in self.legal_collections[self.collection]:
raise InvalidCorsError('Tag %s not allowed in %s collection' %
(name, self.collection))
self.element = name
else:
raise InvalidCorsError('Unsupported tag ' + name)
def endElement(self, name, value, connection):
"""SAX XML logic for parsing new element found."""
if name == CORS_CONFIG:
self.validateParseLevel(name, 1)
self.parse_level -= 1;
elif name == CORS:
self.validateParseLevel(name, 2)
self.parse_level -= 1;
# Terminating a CORS element, save any collections we found
# and re-initialize collections list.
self.cors.append(self.collections)
self.collections = []
elif name in self.legal_collections:
self.validateParseLevel(name, 3)
if name != self.collection:
raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
(self.collection, name))
self.parse_level -= 1
if not self.legal_collections[name]:
# If this collection doesn't contain any sub-elements, store
# a tuple of name and this tag's element value.
self.collections.append((name, value.strip()))
else:
# Otherwise, we're terminating a collection of sub-elements,
# so store a tuple of name and list of contained elements.
self.collections.append((name, self.elements))
self.elements = []
self.collection = None
elif name in self.legal_elements:
self.validateParseLevel(name, 3)
# Make sure this tag is found inside a collection tag.
if self.collection is None:
raise InvalidCorsError('Tag %s found outside collection' % name)
# Make sure this end tag is allowed for the current collection tag.
if name not in self.legal_collections[self.collection]:
raise InvalidCorsError('Tag %s not allowed in %s collection' %
(name, self.collection))
if name != self.element:
raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
(self.element, name))
# Terminating an element tag, add it to the list of elements
# for the current collection.
self.elements.append((name, value.strip()))
self.element = None
else:
raise InvalidCorsError('Unsupported end tag ' + name)
def to_xml(self):
"""Convert CORS object into XML string representation."""
s = '<' + CORS_CONFIG + '>'
for collections in self.cors:
s += '<' + CORS + '>'
for (collection, elements_or_value) in collections:
assert collection is not None
s += '<' + collection + '>'
# If collection elements has type string, append atomic value,
# otherwise, append sequence of values in named tags.
if isinstance(elements_or_value, str):
s += elements_or_value
else:
for (name, value) in elements_or_value:
assert name is not None
assert value is not None
s += '<' + name + '>' + value + '</' + name + '>'
s += '</' + collection + '>'
s += '</' + CORS + '>'
s += '</' + CORS_CONFIG + '>'
return s
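# For the example document above, the parsed structure stored on an instance
# would look like this (a sketch traced from the handlers above):
#   self.cors == [[('Origins', [('Origin', 'http://example.com')]),
#                  ('Methods', [('Method', 'GET'), ('Method', 'HEAD')]),
#                  ('ResponseHeaders', [('ResponseHeader', 'Content-Type')]),
#                  ('MaxAgeSec', '3600')]]
# and to_xml() re-serializes that structure into an equivalent document.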
| mit | 8,215,219,603,163,938,000 | 44.662722 | 80 | 0.602047 | false |
kapilrastogi/Impala | tests/comparison/discrepancy_searcher.py | 2 | 33095 | #!/usr/bin/env impala-python
# Copyright (c) 2014 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This module will run random queries against existing databases and compare the
results.
'''
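# Typical invocation (a sketch; the exact flags are declared in the __main__
# block at the bottom of this file, and --db-name is assumed to come from
# cli_options.add_db_name_option):
#   ./discrepancy_searcher.py --db-name=<database> --query-count=100 \
#       --ref-db-type=POSTGRESQL --test-db-type=IMPALA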
from copy import deepcopy
from decimal import Decimal
from itertools import izip
from logging import getLogger
from math import isinf, isnan
from os import getenv, symlink, unlink
from os.path import join as join_path
from random import choice, randint
from string import ascii_lowercase, digits
from subprocess import call
from tempfile import gettempdir
from threading import current_thread, Thread
from time import time
from db_types import BigInt
from db_connection import (
DbCursor,
IMPALA,
HIVE,
MYSQL,
ORACLE,
POSTGRESQL)
from model_translator import SqlWriter
from query_flattener import QueryFlattener
from query_generator import QueryGenerator
LOG = getLogger(__name__)
class QueryResultComparator(object):
'''Used for comparing the results of a Query across two databases'''
# Used when comparing FLOAT values
EPSILON = 0.1
# The DECIMAL values will be rounded before comparison
DECIMAL_PLACES = 2
def __init__(self, query_profile, ref_conn,
test_conn, query_timeout_seconds, flatten_dialect=None):
'''test/ref_conn arguments should be an instance of DbConnection'''
ref_cursor = ref_conn.cursor()
test_cursor = test_conn.cursor()
self.ref_conn = ref_conn
self.ref_sql_writer = SqlWriter.create(
dialect=ref_conn.db_type, nulls_order_asc=query_profile.nulls_order_asc())
self.test_conn = test_conn
self.test_sql_writer = SqlWriter.create(dialect=test_conn.db_type)
self.query_executor = QueryExecutor(
[ref_cursor, test_cursor],
[self.ref_sql_writer, self.test_sql_writer],
query_timeout_seconds=query_timeout_seconds,
flatten_dialect=flatten_dialect)
@property
def ref_db_type(self):
return self.ref_conn.db_type
def compare_query_results(self, query):
'''Execute the query, compare the data, and return a ComparisonResult, which
summarizes the outcome.
'''
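    # Overall flow: run the query against both databases, short-circuit on
    # errors (mapping known failure signatures to KnownError), compare row
    # counts, normalize and sort both result sets, then compare cell by cell.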
comparison_result = ComparisonResult(query, self.ref_db_type)
(ref_sql, ref_exception, ref_data_set, ref_cursor_description), (test_sql,
test_exception, test_data_set, test_cursor_description) = \
self.query_executor.fetch_query_results(query)
comparison_result.ref_sql = ref_sql
comparison_result.test_sql = test_sql
if ref_exception:
comparison_result.exception = ref_exception
error_message = str(ref_exception)
if 'Year is out of valid range: 1400..10000' in error_message:
# This comes from Postgresql. Overflow errors will be ignored.
comparison_result.exception = TypeOverflow(error_message)
LOG.debug('%s encountered an error running query: %s',
self.ref_conn.db_type, ref_exception, exc_info=True)
return comparison_result
if test_exception:
# "known errors" will be ignored
error_message = str(test_exception)
known_error = None
if 'Expressions in the ORDER BY clause must not be constant' in error_message \
or 'Expressions in the PARTITION BY clause must not be consta' in error_message:
# It's too much work to avoid this bug. Just ignore it if it comes up.
known_error = KnownError('https://issues.cloudera.org/browse/IMPALA-1354')
elif 'GROUP BY expression must not contain aggregate functions' in error_message \
or 'select list expression not produced by aggregation output' in error_message:
known_error = KnownError('https://issues.cloudera.org/browse/IMPALA-1423')
elif ('max(' in error_message or 'min(' in error_message) \
and 'only supported with an UNBOUNDED PRECEDING start bound' in error_message:
# This analytic isn't supported and ignoring this here is much easier than not
# generating the query...
known_error = KnownError('MAX UNBOUNDED PRECISION')
elif 'IN and/or EXISTS subquery predicates are not supported in binary predicates' \
in error_message:
known_error = KnownError('https://issues.cloudera.org/browse/IMPALA-1418')
elif 'Unsupported predicate with subquery' in error_message:
known_error = KnownError('https://issues.cloudera.org/browse/IMPALA-1950')
elif 'RIGHT OUTER JOIN type with no equi-join' in error_message:
known_error = KnownError('https://issues.cloudera.org/browse/IMPALA-3063')
elif 'Operation is in ERROR_STATE' in error_message:
known_error = KnownError('Mem limit exceeded')
if known_error:
comparison_result.exception = known_error
else:
comparison_result.exception = test_exception
LOG.debug('%s encountered an error running query: %s',
self.test_conn.db_type, test_exception, exc_info=True)
return comparison_result
comparison_result.ref_row_count = len(ref_data_set)
comparison_result.test_row_count = len(test_data_set)
comparison_result.query_resulted_in_data = (comparison_result.test_row_count > 0
or comparison_result.ref_row_count > 0)
if comparison_result.ref_row_count != comparison_result.test_row_count:
return comparison_result
# Standardize data (round FLOATs) in each column, and sort the data set
for data_set in (ref_data_set, test_data_set):
for row_idx, row in enumerate(data_set):
data_set[row_idx] = []
for col_idx, col in enumerate(row):
data_set[row_idx].append(self.standardize_data(col,
ref_cursor_description[col_idx], test_cursor_description[col_idx]))
# TODO: If the query has an ORDER BY clause, sorting should only be done within
# subsets of rows that have the same order by values.
data_set.sort(cmp=self.row_sort_cmp)
found_data = False # Will be set to True if the result contains non-zero/NULL data
    for row_idx, (ref_row, test_row) in enumerate(izip(ref_data_set, test_data_set)):
for col_idx, (ref_val, test_val) in enumerate(izip(ref_row, test_row)):
if ref_val or test_val: # Ignores zeros, ex "SELECT COUNT(*) ... WHERE FALSE"
found_data = True
if self.vals_are_equal(ref_val, test_val):
continue
if isinstance(test_val, int) \
and isinstance(ref_val, (int, float, Decimal)) \
and abs(ref_val) > BigInt.MAX:
# Impala will return incorrect results if the val is greater than max BigInt
comparison_result.exception = KnownError(
'https://issues.cloudera.org/browse/IMPALA-865')
elif isinstance(test_val, float) \
and (isinf(test_val) or isnan(test_val)):
# In some cases, Impala gives NaNs and Infs instead of NULLs
comparison_result.exception = KnownError(
'https://issues.cloudera.org/browse/IMPALA-724')
comparison_result.ref_row = ref_row
comparison_result.test_row = test_row
comparison_result.mismatch_at_row_number = row_idx + 1
comparison_result.mismatch_at_col_number = col_idx + 1
return comparison_result
comparison_result.query_resulted_in_data = found_data
return comparison_result
def standardize_data(self, data, ref_col_description, test_col_description):
'''Return a val that is suitable for comparison.'''
# For float data we need to round otherwise differences in precision will cause errors
if isinstance(data, float):
return round(data, self.DECIMAL_PLACES)
if isinstance(data, Decimal):
if ref_col_description[5] is not None and test_col_description[5] is not None:
return round(data, min(ref_col_description[5], test_col_description[5]))
return data
def row_sort_cmp(self, ref_row, test_row):
'''Comparison used for sorting. '''
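    # NULL (None) is ordered before any non-NULL value so that both result
    # sets sort identically regardless of each database's NULL-ordering rules.
    # Example: (None, 2) sorts before (1, 1).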
for ref_val, test_val in izip(ref_row, test_row):
if ref_val is None and test_val is not None:
return -1
if ref_val is not None and test_val is None:
return 1
result = cmp(ref_val, test_val)
if result:
return result
return 0
def vals_are_equal(self, ref, test):
'''Compares if two values are equal in two cells. Floats are considered equal if the
difference between them is very small.'''
if ref == test:
return True
# For some reason Postgresql will return Decimals when using some aggregate
# functions such as AVG().
if isinstance(ref, (float, Decimal)) and isinstance(test, float):
return self.floats_are_equal(ref, test)
LOG.debug("Values differ, reference: %s (%s), test: %s (%s)",
ref, type(ref),
test, type(test))
return False
def floats_are_equal(self, ref, test):
'''Compare two floats.'''
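    # The check is a symmetric relative difference: |ref - test| / (|ref| + |test|),
    # e.g. ref=100.0, test=105.0 gives 5 / 205 ~= 0.024 < EPSILON -> equal.
    # When either value is zero (ref * test == 0) the relative form degenerates,
    # so the absolute difference is compared against EPSILON instead.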
ref = round(ref, self.DECIMAL_PLACES)
test = round(test, self.DECIMAL_PLACES)
diff = abs(ref - test)
if ref * test == 0:
return diff < self.EPSILON
result = diff / (abs(ref) + abs(test)) < self.EPSILON
if not result:
LOG.debug("Floats differ, diff: %s, |reference|: %s, |test|: %s",
diff, abs(ref), abs(test))
return result
class QueryExecutor(object):
'''Concurrently executes queries'''
# If the number of rows * cols is greater than this val, then the comparison will
# be aborted. Raising this value also raises the risk of python being OOM killed. At
# 10M python would get OOM killed occasionally even on a physical machine with 32GB
# ram.
TOO_MUCH_DATA = 1000 * 1000
def __init__(self, cursors, sql_writers, query_timeout_seconds, flatten_dialect=None):
'''cursors should be a list of db_connector.Cursors.
sql_writers should be a list of model_translator.SqlWriters, with translators in
the same order as cursors in "cursors".
'''
self.query_timeout_seconds = query_timeout_seconds
self.cursors = cursors
self.sql_writers = sql_writers
self.query_logs = list()
# SQL dialect for which the queries should be flattened
self.flatten_dialect = flatten_dialect
for cursor in cursors:
# A list of all queries attempted
query_log_path = gettempdir() + '/test_query_log_%s_%s.sql' \
% (cursor.db_type.lower(), time())
self.query_logs.append(open(query_log_path, 'w'))
link = gettempdir() + '/test_query_log_%s.sql' % cursor.db_type.lower()
try:
unlink(link)
except OSError as e:
        if 'No such file' not in str(e):
raise e
try:
symlink(query_log_path, link)
except OSError as e:
# TODO: Figure out what the error message is where there is a race condition
# and ignore it.
raise e
# In case the query will be executed as a "CREATE TABLE <name> AS ..." or
# "CREATE VIEW <name> AS ...", this will be the value of "<name>".
self._table_or_view_name = None
  def set_impala_query_options(self, cursor):
opts = """
SET MEM_LIMIT={mem_limit};
SET BATCH_SIZE={batch_size};
SET DISABLE_CODEGEN={disable_codegen};
SET DISABLE_OUTERMOST_TOPN={disable_outermost_topn};
SET DISABLE_ROW_RUNTIME_FILTERING={disable_row_runtime_filtering};
SET DISABLE_STREAMING_PREAGGREGATIONS={disable_streaming_preaggregations};
SET DISABLE_UNSAFE_SPILLS={disable_unsafe_spills};
SET EXEC_SINGLE_NODE_ROWS_THRESHOLD={exec_single_node_rows_threshold};
SET MAX_BLOCK_MGR_MEMORY={max_block_mgr_memory};
SET MAX_IO_BUFFERS={max_io_buffers};
SET MAX_SCAN_RANGE_LENGTH={max_scan_range_length};
SET NUM_NODES={num_nodes};
SET NUM_SCANNER_THREADS={num_scanner_threads};
SET OPTIMIZE_PARTITION_KEY_SCANS={optimize_partition_key_scans};
    SET RANDOM_REPLICA={random_replica};
    SET REPLICA_PREFERENCE={replica_preference};
    SET RUNTIME_BLOOM_FILTER_SIZE={runtime_bloom_filter_size};
SET RUNTIME_FILTER_MODE={runtime_filter_mode};
SET RUNTIME_FILTER_WAIT_TIME_MS={runtime_filter_wait_time_ms};
SET SCAN_NODE_CODEGEN_THRESHOLD={scan_node_codegen_threshold}""".format(
mem_limit=randint(1024 ** 3, 10 * 1024 ** 3),
batch_size=randint(1, 4096),
disable_codegen=choice((0, 1)),
disable_outermost_topn=choice((0, 1)),
disable_row_runtime_filtering=choice((0, 1)),
disable_streaming_preaggregations=choice((0, 1)),
disable_unsafe_spills=choice((0, 1)),
exec_single_node_rows_threshold=randint(1, 100000000),
max_block_mgr_memory=randint(1, 100000000),
max_io_buffers=randint(1, 100000000),
max_scan_range_length=randint(1, 100000000),
num_nodes=randint(3, 3),
num_scanner_threads=randint(1, 100),
optimize_partition_key_scans=choice((0, 1)),
random_replica=choice((0, 1)),
replica_preference=choice(("CACHE_LOCAL", "DISK_LOCAL", "REMOTE")),
runtime_bloom_filter_size=randint(4096, 16777216),
runtime_filter_mode=choice(("OFF", "LOCAL", "GLOBAL")),
runtime_filter_wait_time_ms=randint(1, 100000000),
scan_node_codegen_threshold=randint(1, 100000000))
LOG.debug(opts)
for opt in opts.strip().split(";"):
cursor.execute(opt)
def fetch_query_results(self, query):
'''Concurrently execute the query using each cursor and return a list of tuples
containing the result information for each cursor. The tuple format is
    (<sql>, <exception or None>, <data set or None>, <cursor description or None>).
If query_timeout_seconds is reached and the connection is killable then the
query will be cancelled and the connection reset. Otherwise the query will
continue to run in the background.
"query" should be an instance of query.Query.
'''
if query.execution != 'RAW':
self._table_or_view_name = self._create_random_table_name()
query_threads = list()
for sql_writer, cursor, log_file \
in izip(self.sql_writers, self.cursors, self.query_logs):
if cursor.db_type == IMPALA:
        self.set_impala_query_options(cursor)
query_thread = Thread(
target=self._fetch_sql_results,
args=[query, cursor, sql_writer, log_file],
name='Query execution thread {0}'.format(current_thread().name))
query_thread.daemon = True
query_thread.sql = ''
query_thread.data_set = None
query_thread.cursor_description = None
query_thread.exception = None
query_thread.start()
query_threads.append(query_thread)
end_time = time() + self.query_timeout_seconds
for query_thread, cursor in izip(query_threads, self.cursors):
join_time = end_time - time()
if join_time > 0:
query_thread.join(join_time)
if query_thread.is_alive():
# Kill connection and reconnect to return cursor to initial state.
if cursor.conn.supports_kill:
LOG.debug('Attempting to kill connection')
cursor.conn.kill()
          LOG.debug('Connection killed')
try:
# XXX: Sometimes this takes a very long time causing the program to appear to
# hang. Maybe this should be done in another thread so a timeout can be
# applied?
cursor.close()
except Exception as e:
LOG.info('Error closing cursor: %s', e)
cursor.reconnect()
query_thread.exception = QueryTimeout(
'Query timed out after %s seconds' % self.query_timeout_seconds)
return [(query_thread.sql,
query_thread.exception,
query_thread.data_set,
query_thread.cursor_description) for query_thread in query_threads]
def _fetch_sql_results(self, query, cursor, sql_writer, log_file):
'''Execute the query using the cursor and set the result or exception on the local
thread.
'''
try:
log_file.write('/***** Start Query *****/\n')
if sql_writer.DIALECT == self.flatten_dialect:
# Converts the query model for the flattened version of the data. This is for
# testing of Impala nested types support.
query = deepcopy(query)
QueryFlattener().flatten(query)
if query.execution == 'CREATE_TABLE_AS':
setup_sql = sql_writer.write_create_table_as(query, self._table_or_view_name)
query_sql = 'SELECT * FROM ' + self._table_or_view_name
elif query.execution == 'VIEW':
setup_sql = sql_writer.write_create_view(query, self._table_or_view_name)
query_sql = 'SELECT * FROM ' + self._table_or_view_name
else:
setup_sql = None
query_sql = sql_writer.write_query(query)
if setup_sql:
LOG.debug("Executing on %s:\n%s", cursor.db_type, setup_sql)
current_thread().sql = setup_sql + ';\n'
log_file.write(setup_sql + ';\n')
log_file.flush()
cursor.execute(setup_sql)
LOG.debug("Executing on %s:\n%s", cursor.db_type, query_sql)
current_thread().sql += query_sql
log_file.write(query_sql + ';\n')
log_file.write('/***** End Query *****/\n')
log_file.flush()
cursor.execute(query_sql)
col_count = len(cursor.description)
batch_size = max(10000 / col_count, 1)
row_limit = self.TOO_MUCH_DATA / col_count
data_set = list()
current_thread().data_set = data_set
current_thread().cursor_description = cursor.description
LOG.debug("Fetching results from %s", cursor.db_type)
while True:
batch = cursor.fetchmany(batch_size)
data_set.extend(batch)
if len(batch) < batch_size:
if cursor.db_type == IMPALA:
impala_log = cursor.get_log()
if 'Expression overflowed, returning NULL' in impala_log:
raise TypeOverflow('Numeric overflow; data may not match')
break
if len(data_set) > row_limit:
raise DataLimitExceeded('Too much data')
except Exception as e:
current_thread().exception = e
finally:
if query.execution == 'CREATE_TABLE_AS':
cursor.drop_table(self._table_or_view_name)
elif query.execution == 'VIEW':
cursor.drop_view(self._table_or_view_name)
def _create_random_table_name(self):
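    # Produces names such as 'qgen_ab3_': the first character is always a
    # letter; from the second character on, digits and '_' are also allowed.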
char_choices = ascii_lowercase
chars = list()
for idx in xrange(4): # will result in ~1M combinations
if idx == 1:
char_choices += '_' + digits
chars.append(choice(char_choices))
return 'qgen_' + ''.join(chars)
class ComparisonResult(object):
'''Represents a result.'''
def __init__(self, query, ref_db_type):
self.query = query
self.ref_db_type = ref_db_type
self.ref_sql = None
self.test_sql = None
self.query_resulted_in_data = False
self.ref_row_count = None
self.test_row_count = None
self.mismatch_at_row_number = None
self.mismatch_at_col_number = None
    self.ref_row = None # The reference row where the mismatch happened
    self.test_row = None # The test row where the mismatch happened
self.exception = None
self._error_message = None
@property
def error(self):
if not self._error_message:
if self.exception:
self._error_message = str(self.exception)
elif (self.ref_row_count or self.test_row_count) and \
self.ref_row_count != self.test_row_count:
        self._error_message = 'Row counts do not match: %s Impala rows vs %s %s rows' \
          % (self.test_row_count,
             self.ref_row_count,
             self.ref_db_type)
elif self.mismatch_at_row_number is not None:
# Write a row like "[a, b, <<c>>, d]" where c is a bad value
test_row = '[' + ', '.join(
'<<' + str(val) + '>>' if idx == self.mismatch_at_col_number - 1 else str(val)
for idx, val in enumerate(self.test_row)
) + ']'
ref_row = '[' + ', '.join(
'<<' + str(val) + '>>' if idx == self.mismatch_at_col_number - 1 else str(val)
for idx, val in enumerate(self.ref_row)
) + ']'
self._error_message = \
'Column %s in row %s does not match: %s Impala row vs %s %s row' \
% (self.mismatch_at_col_number,
self.mismatch_at_row_number,
test_row,
ref_row,
self.ref_db_type)
return self._error_message
@property
def is_known_error(self):
return isinstance(self.exception, KnownError)
@property
def query_timed_out(self):
return isinstance(self.exception, QueryTimeout)
QueryTimeout = type('QueryTimeout', (Exception, ), {})
TypeOverflow = type('TypeOverflow', (Exception, ), {})
DataLimitExceeded = type('DataLimitExceeded', (Exception, ), {})
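# These three are lightweight exception types; each type(...) call is
# equivalent to, e.g., 'class QueryTimeout(Exception): pass'.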
class KnownError(Exception):
def __init__(self, jira_url):
Exception.__init__(self, 'Known issue: ' + jira_url)
self.jira_url = jira_url
class FrontendExceptionSearcher(object):
def __init__(self, query_profile, ref_conn, test_conn):
'''query_profile should be an instance of one of the profiles in query_profile.py'''
self.query_profile = query_profile
self.ref_conn = ref_conn
self.test_conn = test_conn
self.ref_sql_writer = SqlWriter.create(dialect=ref_conn.db_type)
self.test_sql_writer = SqlWriter.create(dialect=test_conn.db_type)
with ref_conn.cursor() as ref_cursor:
with test_conn.cursor() as test_cursor:
self.common_tables = DbCursor.describe_common_tables([ref_cursor, test_cursor])
if not self.common_tables:
raise Exception("Unable to find a common set of tables in both databases")
def search(self, number_of_test_queries):
query_generator = QueryGenerator(self.query_profile)
def on_ref_db_error(e, sql):
LOG.warn("Error generating explain plan for reference db:\n%s\n%s" % (e, sql))
def on_test_db_error(e, sql):
LOG.error("Error generating explain plan for test db:\n%s" % sql)
raise e
for idx in xrange(number_of_test_queries):
LOG.info("Explaining query #%s" % (idx + 1))
query = query_generator.create_query(self.common_tables)
if not self._explain_query(self.ref_conn, self.ref_sql_writer, query,
on_ref_db_error):
continue
self._explain_query(self.test_conn, self.test_sql_writer, query,
on_test_db_error)
def _explain_query(self, conn, writer, query, exception_handler):
sql = writer.write_query(query)
try:
with conn.cursor() as cursor:
cursor.execute("EXPLAIN %s" % sql)
return True
except Exception as e:
exception_handler(e, sql)
return False
class QueryResultDiffSearcher(object):
'''This class uses the query generator (query_generator.py) along with the
query profile (query_profile.py) to randomly generate queries then executes the
queries on the reference and test databases, then compares the results.
'''
# Sometimes things get into a bad state and the same error loops forever
ABORT_ON_REPEAT_ERROR_COUNT = 2
def __init__(self, query_profile, ref_conn, test_conn):
'''query_profile should be an instance of one of the profiles in query_profile.py'''
self.query_profile = query_profile
self.ref_conn = ref_conn
self.test_conn = test_conn
with ref_conn.cursor() as ref_cursor:
with test_conn.cursor() as test_cursor:
self.common_tables = DbCursor.describe_common_tables([ref_cursor, test_cursor])
if not self.common_tables:
raise Exception("Unable to find a common set of tables in both databases")
def search(self, number_of_test_queries, stop_on_result_mismatch, stop_on_crash,
query_timeout_seconds):
'''Returns an instance of SearchResults, which is a summary report. This method
oversees the generation, execution, and comparison of queries.
    number_of_test_queries should be an integer indicating the maximum number of queries
to generate and execute.
'''
start_time = time()
query_result_comparator = QueryResultComparator(
self.query_profile, self.ref_conn, self.test_conn, query_timeout_seconds)
query_generator = QueryGenerator(self.query_profile)
query_count = 0
queries_resulted_in_data_count = 0
mismatch_count = 0
query_timeout_count = 0
known_error_count = 0
test_crash_count = 0
last_error = None
repeat_error_count = 0
while number_of_test_queries > query_count:
query = query_generator.create_query(self.common_tables)
query.execution = self.query_profile.get_query_execution()
query_count += 1
LOG.info('Running query #%s', query_count)
result = query_result_comparator.compare_query_results(query)
if result.query_resulted_in_data:
queries_resulted_in_data_count += 1
if isinstance(result.exception, DataLimitExceeded) \
or isinstance(result.exception, TypeOverflow):
continue
if result.error:
# TODO: These first two come from psycopg2, the postgres driver. Maybe we should
# try a different driver? Or maybe the usage of the driver isn't correct.
# Anyhow ignore these failures.
if 'division by zero' in result.error \
or 'out of range' in result.error:
LOG.debug('Ignoring error: %s', result.error)
query_count -= 1
continue
if result.is_known_error:
known_error_count += 1
elif result.query_timed_out:
query_timeout_count += 1
else:
mismatch_count += 1
print('---Test Query---\n')
print(result.test_sql + '\n')
print('---Reference Query---\n')
print(result.ref_sql + '\n')
print('---Error---\n')
print(result.error + '\n')
print('------\n')
if 'Could not connect' in result.error \
or "Couldn't open transport for" in result.error:
if stop_on_crash:
break
# Assume Impala crashed and try restarting
test_crash_count += 1
LOG.info('Restarting Impala')
call([join_path(getenv('IMPALA_HOME'), 'bin/start-impala-cluster.py'),
'--log_dir=%s' % getenv('LOG_DIR', "/tmp/")])
self.test_conn.reconnect()
query_result_comparator.test_cursor = self.test_conn.cursor()
result = query_result_comparator.compare_query_results(query)
if result.error:
LOG.info('Restarting Impala')
call([join_path(getenv('IMPALA_HOME'), 'bin/start-impala-cluster.py'),
'--log_dir=%s' % getenv('LOG_DIR', "/tmp/")])
self.test_conn.reconnect()
query_result_comparator.test_cursor = self.test_conn.cursor()
else:
break
if stop_on_result_mismatch and \
not (result.is_known_error or result.query_timed_out):
break
if last_error == result.error \
and not (result.is_known_error or result.query_timed_out):
repeat_error_count += 1
if repeat_error_count == self.ABORT_ON_REPEAT_ERROR_COUNT:
break
else:
last_error = result.error
repeat_error_count = 0
else:
if result.query_resulted_in_data:
LOG.info('Results matched (%s rows)', result.test_row_count)
else:
LOG.info('Query did not produce meaningful data')
last_error = None
repeat_error_count = 0
return SearchResults(
query_count,
queries_resulted_in_data_count,
mismatch_count,
query_timeout_count,
known_error_count,
test_crash_count,
time() - start_time)
class SearchResults(object):
'''This class holds information about the outcome of a search run.'''
def __init__(self,
query_count,
queries_resulted_in_data_count,
mismatch_count,
query_timeout_count,
known_error_count,
test_crash_count,
run_time_in_seconds):
# Approx number of queries run, some queries may have been ignored
self.query_count = query_count
self.queries_resulted_in_data_count = queries_resulted_in_data_count
# Number of queries that had an error or result mismatch
self.mismatch_count = mismatch_count
self.query_timeout_count = query_timeout_count
self.known_error_count = known_error_count
self.test_crash_count = test_crash_count
self.run_time_in_seconds = run_time_in_seconds
def __str__(self):
'''Returns the string representation of the results.'''
mins, secs = divmod(self.run_time_in_seconds, 60)
hours, mins = divmod(mins, 60)
hours = int(hours)
mins = int(mins)
if hours:
run_time = '%s hour and %s minutes' % (hours, mins)
else:
secs = int(secs)
run_time = '%s seconds' % secs
if mins:
run_time = '%s mins and ' % mins + run_time
summary_params = self.__dict__
summary_params['run_time'] = run_time
return (
'%(mismatch_count)s mismatches found after running %(query_count)s queries in '
'%(run_time)s.\n'
'%(queries_resulted_in_data_count)s of %(query_count)s queries produced results.'
'\n'
'%(test_crash_count)s crashes occurred.\n'
'%(known_error_count)s queries were excluded from the mismatch count because '
'they are known errors.\n'
'%(query_timeout_count)s queries timed out and were excluded from all counts.') \
% summary_params
if __name__ == '__main__':
import sys
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
import cli_options
from query_profile import PROFILES
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
cli_options.add_logging_options(parser)
cli_options.add_db_name_option(parser)
cli_options.add_cluster_options(parser)
cli_options.add_connection_option_groups(parser)
cli_options.add_timeout_option(parser)
parser.add_argument('--test-db-type', default=IMPALA,
choices=(HIVE, IMPALA, MYSQL, ORACLE, POSTGRESQL),
help='The type of the test database to use. Ex: IMPALA.')
parser.add_argument('--ref-db-type', default=POSTGRESQL,
choices=(MYSQL, ORACLE, POSTGRESQL),
help='The type of the ref database to use. Ex: POSTGRESQL.')
parser.add_argument('--stop-on-mismatch', default=False, action='store_true',
      help='Exit immediately upon finding a discrepancy in a query result.')
parser.add_argument('--stop-on-crash', default=False, action='store_true',
help='Exit immediately if Impala crashes.')
parser.add_argument('--query-count', default=1000000, type=int,
help='Exit after running the given number of queries.')
parser.add_argument('--exclude-types', default='',
help='A comma separated list of data types to exclude while generating queries.')
parser.add_argument('--explain-only', action='store_true',
help="Don't run the queries only explain them to see if there was an error in "
"planning.")
profiles = dict()
for profile in PROFILES:
profile_name = profile.__name__
if profile_name.endswith('Profile'):
profile_name = profile_name[:-1 * len('Profile')]
profiles[profile_name.lower()] = profile
parser.add_argument('--profile', default='default',
choices=(sorted(profiles.keys())),
help='Determines the mix of SQL features to use during query generation.')
# TODO: Seed the random query generator for repeatable queries?
args = parser.parse_args()
cli_options.configure_logging(args.log_level, debug_log_file=args.debug_log_file)
cluster = cli_options.create_cluster(args)
ref_conn = cli_options.create_connection(args, args.ref_db_type, db_name=args.db_name)
if args.test_db_type == IMPALA:
test_conn = cluster.impala.connect(db_name=args.db_name)
elif args.test_db_type == HIVE:
test_conn = cluster.hive.connect(db_name=args.db_name)
else:
test_conn = cli_options.create_connection(
args, args.test_db_type, db_name=args.db_name)
# Create an instance of profile class (e.g. DefaultProfile)
query_profile = profiles[args.profile]()
if args.explain_only:
searcher = FrontendExceptionSearcher(query_profile, ref_conn, test_conn)
searcher.search(args.query_count)
else:
diff_searcher = QueryResultDiffSearcher(query_profile, ref_conn, test_conn)
query_timeout_seconds = args.timeout
search_results = diff_searcher.search(
args.query_count, args.stop_on_mismatch, args.stop_on_crash, query_timeout_seconds)
print(search_results)
sys.exit(search_results.mismatch_count)
| apache-2.0 | 8,082,747,563,557,498,000 | 40.524467 | 91 | 0.650008 | false |