repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
j00bar/ansible | lib/ansible/modules/storage/netapp/na_cdot_lun.py | 69 | 12603 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: na_cdot_lun
short_description: Manage NetApp cDOT LUNs
extends_documentation_fragment:
- netapp.ontap
version_added: '2.3'
author: Sumit Kumar ([email protected])
description:
- Create, destroy, or resize LUNs on NetApp cDOT.
options:
state:
description:
    - Whether the specified LUN should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
    - The name of the LUN to manage.
required: true
flexvol_name:
description:
    - The name of the FlexVol the LUN should exist on.
- Required when C(state=present).
size:
description:
    - The size of the LUN in C(size_unit).
- Required when C(state=present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
force_resize:
description:
    - Forcibly reduce the size. This flag must be set when shrinking a LUN; requiring it guards against accidental size reduction.
default: false
force_remove:
description:
- If "true", override checks that prevent a LUN from being destroyed if it is online and mapped.
- If "false", destroying an online and mapped LUN will fail.
default: false
force_remove_fenced:
description:
- If "true", override checks that prevent a LUN from being destroyed while it is fenced.
- If "false", attempting to destroy a fenced LUN will fail.
    - This field is available in Data ONTAP 8.2 and later.
default: false
vserver:
required: true
description:
- The name of the vserver to use.
'''
EXAMPLES = """
- name: Create LUN
na_cdot_lun:
state: present
name: ansibleLUN
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: mb
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Resize LUN
na_cdot_lun:
state: present
name: ansibleLUN
force_resize: True
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: gb
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
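# The original EXAMPLES stop here; the following removal task is a sketch
# inferred from the documented options (state=absent plus the same
# identifying parameters). Verify against your Ansible/module version.
- name: Delete LUN
  na_cdot_lun:
    state: absent
    name: ansibleLUN
    flexvol_name: ansibleVolume
    vserver: ansibleVServer
    hostname: "{{ netapp_hostname }}"
    username: "{{ netapp_username }}"
    password: "{{ netapp_password }}"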
"""
RETURN = """
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppCDOTLUN(object):
def __init__(self):
self._size_unit_map = dict(
bytes=1,
b=1,
kb=1024,
mb=1024 ** 2,
gb=1024 ** 3,
tb=1024 ** 4,
pb=1024 ** 5,
eb=1024 ** 6,
zb=1024 ** 7,
yb=1024 ** 8
)
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
size=dict(type='int'),
size_unit=dict(default='gb',
choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
'pb', 'eb', 'zb', 'yb'], type='str'),
force_resize=dict(default=False, type='bool'),
force_remove=dict(default=False, type='bool'),
force_remove_fenced=dict(default=False, type='bool'),
flexvol_name=dict(type='str'),
vserver=dict(required=True, type='str'),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['flexvol_name', 'size'])
],
supports_check_mode=True
)
p = self.module.params
# set up state variables
self.state = p['state']
self.name = p['name']
self.size_unit = p['size_unit']
if p['size'] is not None:
self.size = p['size'] * self._size_unit_map[self.size_unit]
else:
self.size = None
self.force_resize = p['force_resize']
self.force_remove = p['force_remove']
self.force_remove_fenced = p['force_remove_fenced']
self.flexvol_name = p['flexvol_name']
self.vserver = p['vserver']
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_ontap_zapi(module=self.module, vserver=self.vserver)
def get_lun(self):
"""
Return details about the LUN
:return: Details about the lun
:rtype: dict
"""
luns = []
tag = None
while True:
lun_info = netapp_utils.zapi.NaElement('lun-get-iter')
if tag:
lun_info.add_new_child('tag', tag, True)
query_details = netapp_utils.zapi.NaElement('lun-info')
query_details.add_new_child('vserver', self.vserver)
query_details.add_new_child('volume', self.flexvol_name)
query = netapp_utils.zapi.NaElement('query')
query.add_child_elem(query_details)
lun_info.add_child_elem(query)
result = self.server.invoke_successfully(lun_info, True)
if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1:
attr_list = result.get_child_by_name('attributes-list')
luns.extend(attr_list.get_children())
tag = result.get_child_content('next-tag')
if tag is None:
break
# The LUNs have been extracted.
# Find the specified lun and extract details.
return_value = None
for lun in luns:
path = lun.get_child_content('path')
_rest, _splitter, found_name = path.rpartition('/')
if found_name == self.name:
size = lun.get_child_content('size')
# Find out if the lun is attached
attached_to = None
lun_id = None
if lun.get_child_content('mapped') == 'true':
lun_map_list = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-map-list-info', **{'path': path})
result = self.server.invoke_successfully(
lun_map_list, enable_tunneling=True)
igroups = result.get_child_by_name('initiator-groups')
if igroups:
for igroup_info in igroups.get_children():
igroup = igroup_info.get_child_content(
'initiator-group-name')
attached_to = igroup
lun_id = igroup_info.get_child_content('lun-id')
return_value = {
'name': found_name,
'size': size,
'attached_to': attached_to,
'lun_id': lun_id
}
else:
continue
return return_value
def create_lun(self):
"""
Create LUN with requested name and size
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_create = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-create-by-size', **{'path': path,
'size': str(self.size),
'ostype': 'linux'})
try:
self.server.invoke_successfully(lun_create, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
err = get_exception()
self.module.fail_json(msg="Error provisioning lun %s of size %s" % (self.name, self.size),
exception=str(err))
def delete_lun(self):
"""
Delete requested LUN
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-destroy', **{'path': path,
'force': str(self.force_remove),
'destroy-fenced-lun':
str(self.force_remove_fenced)})
try:
self.server.invoke_successfully(lun_delete, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
err = get_exception()
self.module.fail_json(msg="Error deleting lun %s" % path,
exception=str(err))
def resize_lun(self):
"""
Resize requested LUN.
        :return: True if the LUN was actually resized, False otherwise.
:rtype: bool
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_resize = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-resize', **{'path': path,
'size': str(self.size),
'force': str(self.force_resize)})
try:
self.server.invoke_successfully(lun_resize, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
e = get_exception()
if str(e.code) == "9042":
# Error 9042 denotes the new LUN size being the same as the
# old LUN size. This happens when there's barely any difference
# in the two sizes. For example, from 8388608 bytes to
# 8194304 bytes. This should go away if/when the default size
# requested/reported to/from the controller is changed to a
# larger unit (MB/GB/TB).
return False
            else:
                # Reuse the exception already captured above instead of
                # calling get_exception() a second time.
                self.module.fail_json(msg="Error resizing lun %s" % path,
                                      exception=str(e))
return True
def apply(self):
property_changed = False
multiple_properties_changed = False
size_changed = False
lun_exists = False
lun_detail = self.get_lun()
if lun_detail:
lun_exists = True
current_size = lun_detail['size']
if self.state == 'absent':
property_changed = True
elif self.state == 'present':
if not current_size == self.size:
size_changed = True
property_changed = True
else:
if self.state == 'present':
property_changed = True
if property_changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
if not lun_exists:
self.create_lun()
else:
if size_changed:
# Ensure that size was actually changed. Please
# read notes in 'resize_lun' function for details.
size_changed = self.resize_lun()
if not size_changed and not \
multiple_properties_changed:
property_changed = False
elif self.state == 'absent':
self.delete_lun()
changed = property_changed or size_changed
# TODO: include other details about the lun (size, etc.)
self.module.exit_json(changed=changed)
def main():
v = NetAppCDOTLUN()
v.apply()
if __name__ == '__main__':
main()
| gpl-3.0 | -1,145,635,739,152,599,800 | 31.735065 | 126 | 0.542172 | false |
0jpq0/kbengine | kbe/res/scripts/common/Lib/test/test_peepholer.py | 84 | 13107 | import dis
import re
import sys
from io import StringIO
import unittest
from math import copysign
from test.bytecode_helper import BytecodeTestCase
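# BytecodeTestCase (from test.bytecode_helper) provides assertInBytecode and
# assertNotInBytecode, which scan dis.get_instructions() output for a given
# opname and optional argument value. A rough sketch of the core check,
# assuming a simplified signature (the real helper also returns the matching
# instruction and dumps a disassembly on failure):
#
#     def assertInBytecode(self, x, opname, argval=None):
#         for instr in dis.get_instructions(x):
#             if instr.opname == opname and (argval is None or
#                                            instr.argval == argval):
#                 return instr
#         self.fail('%s not found in bytecode of %r' % (opname, x))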
class TestTransforms(BytecodeTestCase):
def test_unot(self):
        # UNARY_NOT POP_JUMP_IF_FALSE --> POP_JUMP_IF_TRUE
def unot(x):
if not x == 2:
del x
self.assertNotInBytecode(unot, 'UNARY_NOT')
self.assertNotInBytecode(unot, 'POP_JUMP_IF_FALSE')
self.assertInBytecode(unot, 'POP_JUMP_IF_TRUE')
def test_elim_inversion_of_is_or_in(self):
for line, cmp_op in (
('not a is b', 'is not',),
('not a in b', 'not in',),
('not a is not b', 'is',),
('not a not in b', 'in',),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'COMPARE_OP', cmp_op)
def test_global_as_constant(self):
# LOAD_GLOBAL None/True/False --> LOAD_CONST None/True/False
def f(x):
None
None
return x
def g(x):
True
return x
def h(x):
False
return x
for func, elem in ((f, None), (g, True), (h, False)):
self.assertNotInBytecode(func, 'LOAD_GLOBAL')
self.assertInBytecode(func, 'LOAD_CONST', elem)
def f():
'Adding a docstring made this test fail in Py2.5.0'
return None
self.assertNotInBytecode(f, 'LOAD_GLOBAL')
self.assertInBytecode(f, 'LOAD_CONST', None)
def test_while_one(self):
# Skip over: LOAD_CONST trueconst POP_JUMP_IF_FALSE xx
def f():
while 1:
pass
return list
for elem in ('LOAD_CONST', 'POP_JUMP_IF_FALSE'):
self.assertNotInBytecode(f, elem)
for elem in ('JUMP_ABSOLUTE',):
self.assertInBytecode(f, elem)
def test_pack_unpack(self):
for line, elem in (
('a, = a,', 'LOAD_CONST',),
('a, b = a, b', 'ROT_TWO',),
('a, b, c = a, b, c', 'ROT_THREE',),
):
code = compile(line,'','single')
self.assertInBytecode(code, elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
self.assertNotInBytecode(code, 'UNPACK_TUPLE')
def test_folding_of_tuples_of_constants(self):
for line, elem in (
('a = 1,2,3', (1, 2, 3)),
('("a","b","c")', ('a', 'b', 'c')),
('a,b,c = 1,2,3', (1, 2, 3)),
('(None, 1, None)', (None, 1, None)),
('((1, 2), 3, 4)', ((1, 2), 3, 4)),
):
code = compile(line,'','single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
# Long tuples should be folded too.
code = compile(repr(tuple(range(10000))),'','single')
self.assertNotInBytecode(code, 'BUILD_TUPLE')
# One LOAD_CONST for the tuple, one for the None return value
load_consts = [instr for instr in dis.get_instructions(code)
if instr.opname == 'LOAD_CONST']
self.assertEqual(len(load_consts), 2)
# Bug 1053819: Tuple of constants misidentified when presented with:
# . . . opcode_with_arg 100 unary_opcode BUILD_TUPLE 1 . . .
# The following would segfault upon compilation
def crater():
(~[
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
],)
def test_folding_of_lists_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_LIST should be folded to a tuple:
('a in [1,2,3]', (1, 2, 3)),
('a not in ["a","b","c"]', ('a', 'b', 'c')),
('a in [None, 1, None]', (None, 1, None)),
('a not in [(1, 2), 3, 4]', ((1, 2), 3, 4)),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_LIST')
def test_folding_of_sets_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_SET should be folded to a frozenset:
('a in {1,2,3}', frozenset({1, 2, 3})),
('a not in {"a","b","c"}', frozenset({'a', 'c', 'b'})),
('a in {None, 1, None}', frozenset({1, None})),
('a not in {(1, 2), 3, 4}', frozenset({(1, 2), 3, 4})),
('a in {1, 2, 3, 3, 2, 1}', frozenset({1, 2, 3})),
):
code = compile(line, '', 'single')
self.assertNotInBytecode(code, 'BUILD_SET')
self.assertInBytecode(code, 'LOAD_CONST', elem)
# Ensure that the resulting code actually works:
def f(a):
return a in {1, 2, 3}
def g(a):
return a not in {1, 2, 3}
self.assertTrue(f(3))
self.assertTrue(not f(4))
self.assertTrue(not g(3))
self.assertTrue(g(4))
def test_folding_of_binops_on_constants(self):
for line, elem in (
('a = 2+3+4', 9), # chained fold
('"@"*4', '@@@@'), # check string ops
('a="abc" + "def"', 'abcdef'), # check string ops
('a = 3**4', 81), # binary power
('a = 3*4', 12), # binary multiply
('a = 13//4', 3), # binary floor divide
('a = 14%4', 2), # binary modulo
('a = 2+3', 5), # binary add
('a = 13-4', 9), # binary subtract
('a = (12,13)[1]', 13), # binary subscr
('a = 13 << 2', 52), # binary lshift
('a = 13 >> 2', 3), # binary rshift
('a = 13 & 7', 5), # binary and
('a = 13 ^ 7', 10), # binary xor
('a = 13 | 7', 15), # binary or
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('BINARY_'))
# Verify that unfoldables are skipped
code = compile('a=2+"b"', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 2)
self.assertInBytecode(code, 'LOAD_CONST', 'b')
# Verify that large sequences do not result from folding
code = compile('a="x"*1000', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 1000)
def test_binary_subscr_on_unicode(self):
# valid code get optimized
code = compile('"foo"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 'f')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
code = compile('"\u0061\uffff"[1]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\uffff')
self.assertNotInBytecode(code,'BINARY_SUBSCR')
# With PEP 393, non-BMP char get optimized
code = compile('"\U00012345"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\U00012345')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
# invalid code doesn't get optimized
# out of range
code = compile('"fuu"[10]', '', 'single')
self.assertInBytecode(code, 'BINARY_SUBSCR')
def test_folding_of_unaryops_on_constants(self):
for line, elem in (
('-0.5', -0.5), # unary negative
('-0.0', -0.0), # -0.0
('-(1.0-1.0)', -0.0), # -0.0 after folding
('-0', 0), # -0
('~-2', 1), # unary invert
('+1', 1), # unary positive
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
# Check that -0.0 works after marshaling
def negzero():
return -(1.0-1.0)
        # Inspect the new function, not the 'code' object left over from the
        # loop above.
        for instr in dis.get_instructions(negzero):
            self.assertFalse(instr.opname.startswith('UNARY_'))
# Verify that unfoldables are skipped
for line, elem, opname in (
('-"abc"', 'abc', 'UNARY_NEGATIVE'),
('~"abc"', 'abc', 'UNARY_INVERT'),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertInBytecode(code, opname)
def test_elim_extra_return(self):
# RETURN LOAD_CONST None RETURN --> RETURN
def f(x):
return x
self.assertNotInBytecode(f, 'LOAD_CONST', None)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 1)
def test_elim_jump_to_return(self):
# JUMP_FORWARD to RETURN --> RETURN
def f(cond, true_value, false_value):
return true_value if cond else false_value
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 2)
def test_elim_jump_after_return1(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
if cond1: return 1
if cond2: return 2
while 1:
return 3
while 1:
if cond1: return 4
return 5
return 6
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 6)
def test_elim_jump_after_return2(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
while 1:
if cond1: return 4
self.assertNotInBytecode(f, 'JUMP_FORWARD')
# There should be one jump for the while loop.
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'JUMP_ABSOLUTE']
self.assertEqual(len(returns), 1)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 2)
def test_make_function_doesnt_bail(self):
def f():
def g()->1+1:
pass
return g
self.assertNotInBytecode(f, 'BINARY_ADD')
def test_constant_folding(self):
# Issue #11244: aggressive constant folding.
exprs = [
'3 * -5',
'-3 * 5',
'2 * (3 * 4)',
'(2 * 3) * 4',
'(-1, 2, 3)',
'(1, -2, 3)',
'(1, 2, -3)',
'(1, 2, -3) * 6',
'lambda x: x in {(3 * -5) + (-1 - 6), (1, -2, 3) * 2, None}',
]
for e in exprs:
code = compile(e, '', 'single')
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
self.assertFalse(instr.opname.startswith('BINARY_'))
self.assertFalse(instr.opname.startswith('BUILD_'))
class TestBuglets(unittest.TestCase):
def test_bug_11510(self):
# folded constant set optimization was commingled with the tuple
# unpacking optimization which would fail if the set had duplicate
# elements so that the set length was unexpected
def f():
x, y = {1, 1}
return x, y
with self.assertRaises(ValueError):
f()
def test_main(verbose=None):
import sys
from test import support
    test_classes = (TestTransforms, TestBuglets)
support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, 'gettotalrefcount'):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
| lgpl-3.0 | 4,436,646,010,475,193,300 | 37.663717 | 81 | 0.492866 | false |
xHeliotrope/injustice_dropper | env/lib/python3.4/site-packages/six.py | 878 | 29664 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.9.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))
def itervalues(d, **kw):
return iter(d.itervalues(**kw))
def iteritems(d, **kw):
return iter(d.iteritems(**kw))
def iterlists(d, **kw):
return iter(d.iterlists(**kw))
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
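# Typical use of reraise() is re-raising the active exception with its
# traceback intact on both Python lines; risky() below is a placeholder:
#
#     try:
#         risky()
#     except Exception:
#         six.reraise(*sys.exc_info())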
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
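# Usage sketch for with_metaclass (Meta and MyClass are placeholder names):
#
#     class Meta(type):
#         pass
#
#     class MyClass(with_metaclass(Meta, object)):
#         pass
#
#     assert type(MyClass) is Meta  # the dummy metaclass replaced itself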
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
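# The decorator form achieves the same thing on an ordinary class definition
# (Meta as in the sketch above); __slots__ and class attributes are carried
# over because the wrapper re-creates the class through the metaclass:
#
#     @add_metaclass(Meta)
#     class MyOtherClass(object):
#         pass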
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
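# Minimal usage sketch (Greeting is a placeholder name):
#
#     @python_2_unicode_compatible
#     class Greeting(object):
#         def __str__(self):
#             return u'h\u00e9llo'  # text on Py3; utf-8 bytes via Py2 __str__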
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| mit | 7,588,587,853,806,405,000 | 34.398568 | 98 | 0.632012 | false |
AlphaSmartDog/DeepLearningNotes | Note-6 A3CNet/Note 6 simple ACNet/sonnet/python/modules/nets/mlp.py | 10 | 8926 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A minimal interface mlp module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import xrange # pylint: disable=redefined-builtin
from sonnet.python.modules import base
from sonnet.python.modules import basic
from sonnet.python.modules import util
import tensorflow as tf
class MLP(base.AbstractModule, base.Transposable):
"""A Multi-Layer perceptron module."""
def __init__(self,
output_sizes,
activation=tf.nn.relu,
activate_final=False,
initializers=None,
partitioners=None,
regularizers=None,
use_bias=True,
custom_getter=None,
name="mlp"):
"""Constructs an MLP module.
Args:
output_sizes: An iterable of output dimensionalities as defined in
`basic.Linear`. Output size can be defined either as number or via a
callable. In the latter case, since the function invocation is deferred
to graph construction time, the user must only ensure that entries can
be called when build is called. Each entry in the iterable defines
properties in the corresponding linear layer.
activation: An activation op. The activation is applied to intermediate
layers, and optionally to the output of the final layer.
activate_final: Boolean determining if the activation is applied to
the output of the final layer. Default `False`.
initializers: Optional dict containing ops to initialize the linear
layers' weights (with key 'w') or biases (with key 'b').
partitioners: Optional dict containing partitioners to partition the
linear layers' weights (with key 'w') or biases (with key 'b').
regularizers: Optional dict containing regularizers for the linear layers'
weights (with key 'w') and the biases (with key 'b'). As a default, no
regularizers are used. A regularizer should be a function that takes
a single `Tensor` as an input and returns a scalar `Tensor` output, e.g.
the L1 and L2 regularizers in `tf.contrib.layers`.
use_bias: Whether to include bias parameters in the linear layers.
Default `True`.
custom_getter: Callable or dictionary of callables to use as
custom getters inside the module. If a dictionary, the keys
correspond to regexes to match variable names. See the `tf.get_variable`
documentation for information about the custom_getter API.
name: Name of the module.
Raises:
KeyError: If initializers contains any keys other than 'w' or 'b'.
KeyError: If regularizers contains any keys other than 'w' or 'b'.
ValueError: If output_sizes is empty.
TypeError: If `activation` is not callable; or if `output_sizes` is not
iterable.
"""
super(MLP, self).__init__(custom_getter=custom_getter, name=name)
if not isinstance(output_sizes, collections.Iterable):
raise TypeError("output_sizes must be iterable")
output_sizes = tuple(output_sizes)
if not output_sizes:
raise ValueError("output_sizes must not be empty")
self._output_sizes = output_sizes
self._num_layers = len(self._output_sizes)
self._input_shape = None
self.possible_keys = self.get_possible_initializer_keys(use_bias=use_bias)
self._initializers = util.check_initializers(
initializers, self.possible_keys)
self._partitioners = util.check_partitioners(
partitioners, self.possible_keys)
self._regularizers = util.check_regularizers(
regularizers, self.possible_keys)
if not callable(activation):
raise TypeError("Input 'activation' must be callable")
self._activation = activation
self._activate_final = activate_final
self._use_bias = use_bias
self._instantiate_layers()
def _instantiate_layers(self):
"""Instantiates all the linear modules used in the network.
Layers are instantiated in the constructor, as opposed to the build
function, because MLP implements the Transposable interface, and the
transpose function can be called before the module is actually connected
to the graph and build is called.
Notice that this is safe since layers in the transposed module are
instantiated using a lambda returning input_size of the mlp layers, and
this doesn't have to return sensible values until the original module is
connected to the graph.
"""
with self._enter_variable_scope():
self._layers = [basic.Linear(self._output_sizes[i],
name="linear_{}".format(i),
initializers=self._initializers,
partitioners=self._partitioners,
regularizers=self._regularizers,
use_bias=self.use_bias)
for i in xrange(self._num_layers)]
@classmethod
def get_possible_initializer_keys(cls, use_bias=True):
return basic.Linear.get_possible_initializer_keys(use_bias=use_bias)
def _build(self, inputs):
"""Assembles the `MLP` and connects it to the graph.
Args:
inputs: A 2D Tensor of size `[batch_size, input_size]`.
Returns:
A 2D Tensor of size `[batch_size, output_sizes[-1]]`.
"""
self._input_shape = tuple(inputs.get_shape().as_list())
net = inputs
final_index = self._num_layers - 1
for layer_id in xrange(self._num_layers):
net = self._layers[layer_id](net)
if final_index != layer_id or self._activate_final:
net = self._activation(net)
return net
@property
def layers(self):
"""Returns a tuple containing the linear layers of the `MLP`."""
return self._layers
@property
def output_sizes(self):
"""Returns a tuple of all output sizes of all the layers."""
return tuple([l() if callable(l) else l for l in self._output_sizes])
@property
def output_size(self):
"""Returns the size of the module output, not including the batch dimension.
This allows the MLP to be used inside a DeepRNN.
Returns:
The scalar size of the module output.
"""
last_size = self._output_sizes[-1]
return last_size() if callable(last_size) else last_size
@property
def use_bias(self):
return self._use_bias
@property
def initializers(self):
"""Returns the intializers dictionary."""
return self._initializers
@property
def partitioners(self):
"""Returns the partitioners dictionary."""
return self._partitioners
@property
def regularizers(self):
"""Returns the regularizers dictionary."""
return self._regularizers
@property
def activation(self):
return self._activation
@property
def activate_final(self):
return self._activate_final
# Implements Transposable interface
@property
def input_shape(self):
"""Returns shape of input `Tensor` passed at last call to `build`."""
self._ensure_is_connected()
return self._input_shape
# Implements Transposable interface
def transpose(self, name=None, activate_final=None):
"""Returns transposed `MLP`.
Args:
name: Optional string specifying the name of the transposed module. The
default name is constructed by appending "_transpose"
to `self.module_name`.
activate_final: Optional boolean determining if the activation and batch
normalization, if turned on, are applied to the final layer.
Returns:
Matching transposed `MLP` module.
"""
if name is None:
name = self.module_name + "_transpose"
if activate_final is None:
activate_final = self.activate_final
output_sizes = [lambda l=layer: l.input_shape[1] for layer in self._layers]
output_sizes.reverse()
return MLP(name=name,
output_sizes=output_sizes,
activation=self.activation,
activate_final=activate_final,
initializers=self.initializers,
partitioners=self.partitioners,
regularizers=self.regularizers,
use_bias=self.use_bias)
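# A minimal connection sketch, not part of the original module; shapes and
# names below are illustrative only (TF1-style graph mode):
#
#     inputs = tf.placeholder(tf.float32, shape=[None, 64])
#     mlp = MLP(output_sizes=[128, 128, 10])  # two hidden layers + logits
#     logits = mlp(inputs)                    # -> shape [None, 10]
#     decoder = mlp.transpose()               # data flow: 10->128->128->64
#     reconstruction = decoder(logits)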
| mit | -5,910,874,843,038,269,000 | 36.504202 | 80 | 0.666032 | false |
disqus/django-old | tests/modeltests/test_client/views.py | 1 | 7992 | from xml.dom.minidom import parseString
from django.core import mail
from django.template import Context, Template
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.contrib.auth.decorators import login_required, permission_required
from django.forms.forms import Form
from django.forms import fields
from django.shortcuts import render_to_response
from django.utils.decorators import method_decorator
def get_view(request):
"A simple view that expects a GET request, and returns a rendered template"
t = Template('This is a test. {{ var }} is the value.', name='GET Template')
c = Context({'var': request.GET.get('var', 42)})
return HttpResponse(t.render(c))
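# These views are exercised through django.test.Client in the test suite;
# the URL below assumes the test urlconf used by this package (redirect_view
# in this module targets the same path):
#
#     from django.test import Client
#     c = Client()
#     response = c.get('/test_client/get_view/', {'var': '19'})
#     # -> 'This is a test. 19 is the value.'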
def post_view(request):
"""A view that expects a POST, and returns a different template depending
on whether any POST data is available
"""
if request.method == 'POST':
if request.POST:
t = Template('Data received: {{ data }} is the value.', name='POST Template')
c = Context({'data': request.POST['value']})
else:
t = Template('Viewing POST page.', name='Empty POST Template')
c = Context()
else:
t = Template('Viewing GET page.', name='Empty GET Template')
c = Context()
return HttpResponse(t.render(c))
def view_with_header(request):
"A view that has a custom header"
response = HttpResponse()
response['X-DJANGO-TEST'] = 'Slartibartfast'
return response
def raw_post_view(request):
"""A view which expects raw XML to be posted and returns content extracted
from the XML"""
if request.method == 'POST':
root = parseString(request.raw_post_data)
first_book = root.firstChild.firstChild
title, author = [n.firstChild.nodeValue for n in first_book.childNodes]
t = Template("{{ title }} - {{ author }}", name="Book template")
c = Context({"title": title, "author": author})
else:
t = Template("GET request.", name="Book GET template")
c = Context()
return HttpResponse(t.render(c))
def redirect_view(request):
"A view that redirects all requests to the GET view"
if request.GET:
from urllib import urlencode
query = '?' + urlencode(request.GET, True)
else:
query = ''
return HttpResponseRedirect('/test_client/get_view/' + query)
def view_with_secure(request):
"A view that indicates if the request was secure"
response = HttpResponse()
response.test_was_secure_request = request.is_secure()
return response
def double_redirect_view(request):
"A view that redirects all requests to a redirection view"
return HttpResponseRedirect('/test_client/permanent_redirect_view/')
def bad_view(request):
"A view that returns a 404 with some error content"
return HttpResponseNotFound('Not found!. This page contains some MAGIC content')
TestChoices = (
('a', 'First Choice'),
('b', 'Second Choice'),
('c', 'Third Choice'),
('d', 'Fourth Choice'),
('e', 'Fifth Choice')
)
class TestForm(Form):
text = fields.CharField()
email = fields.EmailField()
value = fields.IntegerField()
single = fields.ChoiceField(choices=TestChoices)
multi = fields.MultipleChoiceField(choices=TestChoices)
def form_view(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
t = Template('Valid POST data.', name='Valid POST Template')
c = Context()
else:
t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template')
c = Context({'form': form})
else:
form = TestForm(request.GET)
t = Template('Viewing base form. {{ form }}.', name='Form GET Template')
c = Context({'form': form})
return HttpResponse(t.render(c))
def form_view_with_template(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
message = 'POST data OK'
else:
message = 'POST data has errors'
else:
form = TestForm()
message = 'GET form page'
return render_to_response('form_view.html',
{
'form': form,
'message': message
}
)
def login_protected_view(request):
"A simple view that is login protected."
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
login_protected_view = login_required(login_protected_view)
def login_protected_view_changed_redirect(request):
"A simple view that is login protected with a custom redirect field set"
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
login_protected_view_changed_redirect = login_required(redirect_field_name="redirect_to")(login_protected_view_changed_redirect)
def _permission_protected_view(request):
"A simple view that is permission protected."
t = Template('This is a permission protected test. '
'Username is {{ user.username }}. '
'Permissions are {{ user.get_all_permissions }}.' ,
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
permission_protected_view = permission_required('modeltests.test_perm')(_permission_protected_view)
permission_protected_view_exception = permission_required('modeltests.test_perm', raise_exception=True)(_permission_protected_view)
class _ViewManager(object):
@method_decorator(login_required)
def login_protected_view(self, request):
t = Template('This is a login protected test using a method. '
'Username is {{ user.username }}.',
name='Login Method Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
@method_decorator(permission_required('modeltests.test_perm'))
def permission_protected_view(self, request):
t = Template('This is a permission protected test using a method. '
'Username is {{ user.username }}. '
'Permissions are {{ user.get_all_permissions }}.' ,
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
_view_manager = _ViewManager()
login_protected_method_view = _view_manager.login_protected_view
permission_protected_method_view = _view_manager.permission_protected_view
def session_view(request):
"A view that modifies the session"
request.session['tobacconist'] = 'hovercraft'
t = Template('This is a view that modifies the session.',
name='Session Modifying View Template')
c = Context()
return HttpResponse(t.render(c))
def broken_view(request):
"""A view which just raises an exception, simulating a broken view."""
raise KeyError("Oops! Looks like you wrote some bad code.")
def mail_sending_view(request):
mail.EmailMessage(
"Test message",
"This is a test email",
"[email protected]",
['[email protected]', '[email protected]']).send()
return HttpResponse("Mail sent")
def mass_mail_sending_view(request):
m1 = mail.EmailMessage(
'First Test message',
'This is the first test email',
'[email protected]',
['[email protected]', '[email protected]'])
m2 = mail.EmailMessage(
'Second Test message',
'This is the second test email',
'[email protected]',
['[email protected]', '[email protected]'])
c = mail.get_connection()
    c.send_messages([m1, m2])
return HttpResponse("Mail sent")
| bsd-3-clause | 1,192,078,506,306,328,000 | 36.172093 | 131 | 0.647648 | false |
django-nonrel/django | docs/_ext/literals_to_xrefs.py | 92 | 4869 | """
Runs through a reST file looking for old-style literals, and helps replace them
with new-style references.
"""
import re
import sys
import shelve
refre = re.compile(r'``([^`\s]+?)``')
ROLES = (
    'attr',
    'class',
    'djadmin',
    'data',
    'exc',
    'file',
    'func',
    'lookup',
    'meth',
    'mod',
    'djadminopt',
    'ref',
    'setting',
    'term',
    'tfilter',
    'ttag',
    # special
    'skip',
)
ALWAYS_SKIP = [
"NULL",
"True",
"False",
]
def fixliterals(fname):
    """Interactively walk the ``literal`` markup in *fname*, prompting for a
    role and target to turn each occurrence into a :role:`target` xref."""
    with open(fname) as fp:
        data = fp.read()
last = 0
new = []
storage = shelve.open("/tmp/literals_to_xref.shelve")
lastvalues = storage.get("lastvalues", {})
for m in refre.finditer(data):
new.append(data[last:m.start()])
last = m.end()
line_start = data.rfind("\n", 0, m.start())
line_end = data.find("\n", m.end())
prev_start = data.rfind("\n", 0, line_start)
next_end = data.find("\n", line_end + 1)
# Skip always-skip stuff
if m.group(1) in ALWAYS_SKIP:
new.append(m.group(0))
continue
# skip when the next line is a title
next_line = data[m.end():next_end].strip()
if next_line[0] in "!-/:-@[-`{-~" and all(c == next_line[0] for c in next_line):
new.append(m.group(0))
continue
sys.stdout.write("\n"+"-"*80+"\n")
sys.stdout.write(data[prev_start+1:m.start()])
sys.stdout.write(colorize(m.group(0), fg="red"))
sys.stdout.write(data[m.end():next_end])
sys.stdout.write("\n\n")
replace_type = None
while replace_type is None:
replace_type = raw_input(
colorize("Replace role: ", fg="yellow")
).strip().lower()
if replace_type and replace_type not in ROLES:
replace_type = None
if replace_type == "":
new.append(m.group(0))
continue
if replace_type == "skip":
new.append(m.group(0))
ALWAYS_SKIP.append(m.group(1))
continue
default = lastvalues.get(m.group(1), m.group(1))
if default.endswith("()") and replace_type in ("class", "func", "meth"):
default = default[:-2]
replace_value = raw_input(
colorize("Text <target> [", fg="yellow") + default + colorize("]: ", fg="yellow")
).strip()
if not replace_value:
replace_value = default
new.append(":%s:`%s`" % (replace_type, replace_value))
lastvalues[m.group(1)] = replace_value
new.append(data[last:])
with open(fname, "w") as fp:
fp.write("".join(new))
storage["lastvalues"] = lastvalues
storage.close()
#
# The following is taken from django.utils.termcolors and is copied here to
# avoid the dependency.
#
def colorize(text='', opts=(), **kwargs):
"""
Returns your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Returns the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold'
'underscore'
'blink'
'reverse'
'conceal'
'noreset' - string will not be auto-terminated with the RESET code
Examples:
colorize('hello', fg='red', bg='blue', opts=('blink',))
colorize()
colorize('goodbye', opts=('underscore',))
print(colorize('first line', fg='red', opts=('noreset',)))
print('this should be red too')
print(colorize('and so should this'))
print('this should not be red')
"""
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
background = dict([(color_names[x], '4%s' % x) for x in range(8)])
RESET = '0'
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
text = str(text)
code_list = []
if text == '' and len(opts) == 1 and opts[0] == 'reset':
return '\x1b[%sm' % RESET
for k, v in kwargs.iteritems():
if k == 'fg':
code_list.append(foreground[v])
elif k == 'bg':
code_list.append(background[v])
for o in opts:
if o in opt_dict:
code_list.append(opt_dict[o])
if 'noreset' not in opts:
text = text + '\x1b[%sm' % RESET
return ('\x1b[%sm' % ';'.join(code_list)) + text
if __name__ == '__main__':
try:
fixliterals(sys.argv[1])
except (KeyboardInterrupt, SystemExit):
print('')
| bsd-3-clause | 920,105,961,648,006,400 | 27.144509 | 93 | 0.522284 | false |
e-dorigatti/pyspider | pyspider/database/mysql/mysqlbase.py | 75 | 1880 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-11-05 10:42:24
import time
import mysql.connector
class MySQLMixin(object):
    @property
    def dbcur(self):
        """Return a cursor, transparently reconnecting if the connection was lost."""
        try:
            # mysql.connector refuses to hand out a new cursor while a previous
            # result set is still unread, so drain it first.
            if self.conn.unread_result:
                self.conn.get_rows()
            return self.conn.cursor()
        except (mysql.connector.OperationalError, mysql.connector.InterfaceError):
            # The server may have dropped an idle connection; ping with
            # reconnect=True re-establishes it, then reselect the database.
            self.conn.ping(reconnect=True)
            self.conn.database = self.database_name
            return self.conn.cursor()
class SplitTableMixin(object):
UPDATE_PROJECTS_TIME = 10 * 60
def _tablename(self, project):
if self.__tablename__:
return '%s_%s' % (self.__tablename__, project)
else:
return project
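    # e.g. with __tablename__ == 'resultdb', project 'demo' maps to the table
    # 'resultdb_demo'; with an empty prefix the project name is used as-is.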
@property
def projects(self):
if time.time() - getattr(self, '_last_update_projects', 0) \
> self.UPDATE_PROJECTS_TIME:
self._list_project()
return self._projects
@projects.setter
def projects(self, value):
self._projects = value
def _list_project(self):
self._last_update_projects = time.time()
self.projects = set()
if self.__tablename__:
prefix = '%s_' % self.__tablename__
else:
prefix = ''
for project, in self._execute('show tables;'):
if project.startswith(prefix):
project = project[len(prefix):]
self.projects.add(project)
def drop(self, project):
if project not in self.projects:
self._list_project()
if project not in self.projects:
return
tablename = self._tablename(project)
self._execute("DROP TABLE %s" % self.escape(tablename))
self._list_project()
| apache-2.0 | -5,175,928,262,583,091,000 | 27.923077 | 82 | 0.569149 | false |
doismellburning/edx-platform | lms/djangoapps/lms_xblock/test/test_runtime.py | 92 | 6099 | """
Tests of the LMS XBlock Runtime and associated utilities
"""
from django.contrib.auth.models import User
from django.conf import settings
from ddt import ddt, data
from mock import Mock
from unittest import TestCase
from urlparse import urlparse
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from lms.djangoapps.lms_xblock.runtime import quote_slashes, unquote_slashes, LmsModuleSystem
from xblock.fields import ScopeIds
TEST_STRINGS = [
'',
'foobar',
'foo/bar',
'foo/bar;',
'foo;;bar',
'foo;_bar',
'foo/',
'/bar',
'foo//bar',
'foo;;;bar',
]
@ddt
class TestQuoteSlashes(TestCase):
"""Test the quote_slashes and unquote_slashes functions"""
@data(*TEST_STRINGS)
def test_inverse(self, test_string):
self.assertEquals(test_string, unquote_slashes(quote_slashes(test_string)))
@data(*TEST_STRINGS)
def test_escaped(self, test_string):
self.assertNotIn('/', quote_slashes(test_string))
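# Together these two properties pin down the contract -- quote_slashes yields a
# '/'-free token and unquote_slashes inverts it (illustrative; the escaped
# form itself is an implementation detail):
#     assert unquote_slashes(quote_slashes('foo/bar')) == 'foo/bar'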
class TestHandlerUrl(TestCase):
"""Test the LMS handler_url"""
def setUp(self):
super(TestHandlerUrl, self).setUp()
self.block = Mock(name='block', scope_ids=ScopeIds(None, None, None, 'dummy'))
self.course_key = SlashSeparatedCourseKey("org", "course", "run")
self.runtime = LmsModuleSystem(
static_url='/static',
track_function=Mock(),
get_module=Mock(),
render_template=Mock(),
replace_urls=str,
course_id=self.course_key,
descriptor_runtime=Mock(),
)
def test_trailing_characters(self):
self.assertFalse(self.runtime.handler_url(self.block, 'handler').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix', 'query').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix', 'query').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', query='query').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', query='query').endswith('/'))
def _parsed_query(self, query_string):
"""Return the parsed query string from a handler_url generated with the supplied query_string"""
return urlparse(self.runtime.handler_url(self.block, 'handler', query=query_string)).query
def test_query_string(self):
self.assertIn('foo=bar', self._parsed_query('foo=bar'))
self.assertIn('foo=bar&baz=true', self._parsed_query('foo=bar&baz=true'))
self.assertIn('foo&bar&baz', self._parsed_query('foo&bar&baz'))
def _parsed_path(self, handler_name='handler', suffix=''):
"""Return the parsed path from a handler_url with the supplied handler_name and suffix"""
return urlparse(self.runtime.handler_url(self.block, handler_name, suffix=suffix)).path
def test_suffix(self):
self.assertTrue(self._parsed_path(suffix="foo").endswith('foo'))
self.assertTrue(self._parsed_path(suffix="foo/bar").endswith('foo/bar'))
self.assertTrue(self._parsed_path(suffix="/foo/bar").endswith('/foo/bar'))
def test_handler_name(self):
self.assertIn('handler1', self._parsed_path('handler1'))
self.assertIn('handler_a', self._parsed_path('handler_a'))
def test_thirdparty_fq(self):
"""Testing the Fully-Qualified URL returned by thirdparty=True"""
parsed_fq_url = urlparse(self.runtime.handler_url(self.block, 'handler', thirdparty=True))
self.assertEqual(parsed_fq_url.scheme, 'https')
self.assertEqual(parsed_fq_url.hostname, settings.SITE_NAME)
def test_not_thirdparty_rel(self):
"""Testing the Fully-Qualified URL returned by thirdparty=False"""
parsed_fq_url = urlparse(self.runtime.handler_url(self.block, 'handler', thirdparty=False))
self.assertEqual(parsed_fq_url.scheme, '')
self.assertIsNone(parsed_fq_url.hostname)
class TestUserServiceAPI(TestCase):
"""Test the user service interface"""
def setUp(self):
super(TestUserServiceAPI, self).setUp()
self.course_id = SlashSeparatedCourseKey("org", "course", "run")
self.user = User(username='runtime_robot', email='[email protected]', password='test', first_name='Robot')
self.user.save()
def mock_get_real_user(_anon_id):
"""Just returns the test user"""
return self.user
self.runtime = LmsModuleSystem(
static_url='/static',
track_function=Mock(),
get_module=Mock(),
render_template=Mock(),
replace_urls=str,
course_id=self.course_id,
get_real_user=mock_get_real_user,
descriptor_runtime=Mock(),
)
self.scope = 'course'
self.key = 'key1'
self.mock_block = Mock()
self.mock_block.service_declaration.return_value = 'needs'
def test_get_set_tag(self):
# test for when we haven't set the tag yet
tag = self.runtime.service(self.mock_block, 'user_tags').get_tag(self.scope, self.key)
self.assertIsNone(tag)
# set the tag
set_value = 'value'
self.runtime.service(self.mock_block, 'user_tags').set_tag(self.scope, self.key, set_value)
tag = self.runtime.service(self.mock_block, 'user_tags').get_tag(self.scope, self.key)
self.assertEqual(tag, set_value)
# Try to set tag in wrong scope
with self.assertRaises(ValueError):
self.runtime.service(self.mock_block, 'user_tags').set_tag('fake_scope', self.key, set_value)
# Try to get tag in wrong scope
with self.assertRaises(ValueError):
self.runtime.service(self.mock_block, 'user_tags').get_tag('fake_scope', self.key)
| agpl-3.0 | 951,441,535,099,486,800 | 38.348387 | 118 | 0.646663 | false |
sorenk/ansible | lib/ansible/modules/crypto/openssl_certificate.py | 15 | 36712 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016-2017, Yanis Guenane <[email protected]>
# (c) 2017, Markus Teufelberger <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: openssl_certificate
author:
- Yanis Guenane (@Spredzy)
- Markus Teufelberger (@MarkusTeufelberger)
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
- "This module allows one to (re)generate OpenSSL certificates. It implements a notion
      of provider (i.e. C(selfsigned), C(acme), C(assertonly)) for your certificate.
The 'assertonly' provider is intended for use cases where one is only interested in
checking properties of a supplied certificate.
Many properties that can be specified in this module are for validation of an
existing or newly generated certificate. The proper place to specify them, if you
want to receive a certificate with these properties is a CSR (Certificate Signing Request).
It uses the pyOpenSSL python library to interact with OpenSSL."
requirements:
- python-pyOpenSSL >= 0.15 (if using C(selfsigned) or C(assertonly) provider)
- acme-tiny (if using the C(acme) provider)
options:
state:
default: "present"
choices: [ present, absent ]
description:
- Whether the certificate should exist or not, taking action if the state is different from what is stated.
path:
required: true
description:
- Remote absolute path where the generated certificate file should be created or is already located.
provider:
required: true
choices: [ 'selfsigned', 'assertonly', 'acme' ]
description:
- Name of the provider to use to generate/retrieve the OpenSSL certificate.
The C(assertonly) provider will not generate files and fail if the certificate file is missing.
force:
default: False
type: bool
description:
- Generate the certificate, even if it already exists.
csr_path:
description:
- Path to the Certificate Signing Request (CSR) used to generate this certificate. This is not required in C(assertonly) mode.
privatekey_path:
description:
- Path to the private key to use when signing the certificate.
privatekey_passphrase:
description:
- The passphrase for the I(privatekey_path).
selfsigned_version:
default: 3
description:
- Version of the C(selfsigned) certificate. Nowadays it should almost always be C(3).
version_added: "2.5"
selfsigned_digest:
default: "sha256"
description:
- Digest algorithm to be used when self-signing the certificate
selfsigned_not_before:
description:
- The timestamp at which the certificate starts being valid. The timestamp is formatted as an ASN.1 TIME.
If this value is not specified, certificate will start being valid from now.
aliases: [ selfsigned_notBefore ]
selfsigned_not_after:
description:
- The timestamp at which the certificate stops being valid. The timestamp is formatted as an ASN.1 TIME.
If this value is not specified, certificate will stop being valid 10 years from now.
aliases: [ selfsigned_notAfter ]
acme_accountkey_path:
description:
- Path to the accountkey for the C(acme) provider
acme_challenge_path:
description:
- Path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/)
acme_chain:
default: True
description:
- Include the intermediate certificate to the generated certificate
version_added: "2.5"
signature_algorithms:
description:
- list of algorithms that you would accept the certificate to be signed with
(e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
issuer:
description:
- Key/value pairs that must be present in the issuer name field of the certificate.
If you need to specify more than one value with the same key, use a list as value.
issuer_strict:
default: False
type: bool
description:
- If set to True, the I(issuer) field must contain only these values.
version_added: "2.5"
subject:
description:
- Key/value pairs that must be present in the subject name field of the certificate.
If you need to specify more than one value with the same key, use a list as value.
subject_strict:
default: False
type: bool
description:
- If set to True, the I(subject) field must contain only these values.
version_added: "2.5"
has_expired:
default: False
type: bool
description:
- Checks if the certificate is expired/not expired at the time the module is executed.
version:
description:
- Version of the certificate. Nowadays it should almost always be 3.
valid_at:
description:
- The certificate must be valid at this point in time. The timestamp is formatted as an ASN.1 TIME.
invalid_at:
description:
- The certificate must be invalid at this point in time. The timestamp is formatted as an ASN.1 TIME.
not_before:
description:
- The certificate must start to become valid at this point in time. The timestamp is formatted as an ASN.1 TIME.
aliases: [ notBefore ]
not_after:
description:
- The certificate must expire at this point in time. The timestamp is formatted as an ASN.1 TIME.
aliases: [ notAfter ]
valid_in:
description:
- The certificate must still be valid in I(valid_in) seconds from now.
key_usage:
description:
- The I(key_usage) extension field must contain all these values.
aliases: [ keyUsage ]
key_usage_strict:
default: False
type: bool
description:
- If set to True, the I(key_usage) extension field must contain only these values.
aliases: [ keyUsage_strict ]
extended_key_usage:
description:
- The I(extended_key_usage) extension field must contain all these values.
aliases: [ extendedKeyUsage ]
extended_key_usage_strict:
default: False
type: bool
description:
- If set to True, the I(extended_key_usage) extension field must contain only these values.
aliases: [ extendedKeyUsage_strict ]
subject_alt_name:
description:
- The I(subject_alt_name) extension field must contain these values.
aliases: [ subjectAltName ]
subject_alt_name_strict:
default: False
type: bool
description:
- If set to True, the I(subject_alt_name) extension field must contain only these values.
aliases: [ subjectAltName_strict ]
extends_documentation_fragment: files
notes:
    - All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
      Dates should be specified in UTC. Minutes and seconds are mandatory.
'''
EXAMPLES = '''
- name: Generate a Self Signed OpenSSL certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
privatekey_path: /etc/ssl/private/ansible.com.pem
csr_path: /etc/ssl/csr/ansible.com.csr
provider: selfsigned
- name: Generate a Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
- name: Force (re-)generate a new Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
force: True
# Examples for some checks one could use the assertonly provider for:
# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: False
ignore_errors: True
register: validity_check
- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
when: validity_check.failed
- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: False
when: validity_check.failed
# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
issuer:
O: Let's Encrypt
has_expired: False
- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
signature_algorithms:
- sha224WithRSAEncryption
- sha256WithRSAEncryption
- sha384WithRSAEncryption
- sha512WithRSAEncryption
- sha224WithECDSAEncryption
- sha256WithECDSAEncryption
- sha384WithECDSAEncryption
- sha512WithECDSAEncryption
- name: Ensure that the existing certificate belongs to the specified private key
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
privatekey_path: /etc/ssl/private/example.com.pem
provider: assertonly
- name: Ensure that the existing certificate is still valid at the winter solstice 2017
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_at: 20171221162800Z
- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_in: 1209600
- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
key_usage:
- digitalSignature
- keyEncipherment
key_usage_strict: true
- name: Ensure that the existing certificate can be used for client authentication
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- name: Ensure that the existing certificate can only be used for client authentication and time stamping
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- 1.3.6.1.5.5.7.3.8
extended_key_usage_strict: true
- name: Ensure that the existing certificate has a certain domain in its subjectAltName
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
subject_alt_name:
- www.example.com
- test.example.com
'''
RETURN = '''
filename:
description: Path to the generated Certificate
returned: changed or success
type: string
sample: /etc/ssl/crt/www.ansible.com.crt
'''
from random import randint
import datetime
import os
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native, to_bytes
try:
import OpenSSL
from OpenSSL import crypto
except ImportError:
pyopenssl_found = False
else:
pyopenssl_found = True
class CertificateError(crypto_utils.OpenSSLObjectError):
pass
class Certificate(crypto_utils.OpenSSLObject):
def __init__(self, module):
super(Certificate, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.provider = module.params['provider']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.csr_path = module.params['csr_path']
self.cert = None
self.privatekey = None
self.module = module
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(Certificate, self).check(module, perms_required)
def _validate_privatekey():
if self.privatekey_path:
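                # An SSL context is used purely as a convenient way to ask
                # OpenSSL whether this key pair and certificate belong together.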
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
if not state_and_perms:
return False
self.cert = crypto_utils.load_certificate(self.path)
if self.privatekey_path:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase
)
return _validate_privatekey()
return True
class SelfSignedCertificate(Certificate):
"""Generate the self-signed certificate."""
def __init__(self, module):
super(SelfSignedCertificate, self).__init__(module)
self.notBefore = module.params['selfsigned_notBefore']
self.notAfter = module.params['selfsigned_notAfter']
self.digest = module.params['selfsigned_digest']
self.version = module.params['selfsigned_version']
self.serial_number = randint(1000, 99999)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase
)
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
if self.notBefore:
cert.set_notBefore(self.notBefore)
else:
cert.gmtime_adj_notBefore(0)
if self.notAfter:
cert.set_notAfter(self.notAfter)
else:
# If no NotAfter specified, expire in
# 10 years. 315360000 is 10 years in seconds.
cert.gmtime_adj_notAfter(315360000)
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.csr.get_subject())
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.privatekey, self.digest)
self.cert = cert
try:
with open(self.path, 'wb') as cert_file:
cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
except EnvironmentError as exc:
raise CertificateError(exc)
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if check_mode:
now = datetime.datetime.utcnow()
ten = now.replace(now.year + 10)
result.update({
'notBefore': self.notBefore if self.notBefore else now.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter if self.notAfter else ten.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
class AssertOnlyCertificate(Certificate):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module)
self.signature_algorithms = module.params['signature_algorithms']
if module.params['subject']:
self.subject = crypto_utils.parse_name_field(module.params['subject'])
else:
self.subject = []
self.subject_strict = module.params['subject_strict']
if module.params['issuer']:
self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
else:
self.issuer = []
self.issuer_strict = module.params['issuer_strict']
self.has_expired = module.params['has_expired']
self.version = module.params['version']
self.keyUsage = module.params['keyUsage']
self.keyUsage_strict = module.params['keyUsage_strict']
self.extendedKeyUsage = module.params['extendedKeyUsage']
self.extendedKeyUsage_strict = module.params['extendedKeyUsage_strict']
self.subjectAltName = module.params['subjectAltName']
self.subjectAltName_strict = module.params['subjectAltName_strict']
self.notBefore = module.params['notBefore']
self.notAfter = module.params['notAfter']
self.valid_at = module.params['valid_at']
self.invalid_at = module.params['invalid_at']
self.valid_in = module.params['valid_in']
self.message = []
self._sanitize_inputs()
def _sanitize_inputs(self):
"""Ensure inputs are properly sanitized before comparison."""
for param in ['signature_algorithms', 'keyUsage', 'extendedKeyUsage',
'subjectAltName', 'subject', 'issuer', 'notBefore',
'notAfter', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
            elif isinstance(attr, tuple):
                # tuples have no .items(); encode each element instead
                # (the original dict(... attr.items()) call would raise here)
                setattr(self, param, tuple(to_bytes(item) for item in attr))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def assertonly(self):
self.cert = crypto_utils.load_certificate(self.path)
def _validate_signature_algorithms():
if self.signature_algorithms:
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
self.message.append(
'Invalid signature algorithm (got %s, expected one of %s)' % (self.cert.get_signature_algorithm(), self.signature_algorithms)
)
def _validate_subject():
if self.subject:
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if (not self.subject_strict and not all(x in current_subject for x in expected_subject)) or \
(self.subject_strict and not set(expected_subject) == set(current_subject)):
self.message.append(
'Invalid subject component (got %s, expected all of %s to be present)' % (cert_subject, self.subject)
)
def _validate_issuer():
if self.issuer:
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if (not self.issuer_strict and not all(x in current_issuer for x in expected_issuer)) or \
(self.issuer_strict and not set(expected_issuer) == set(current_issuer)):
self.message.append(
'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
)
def _validate_has_expired():
if self.has_expired:
if self.has_expired != self.cert.has_expired():
self.message.append(
'Certificate expiration check failed (certificate expiration is %s, expected %s)' % (self.cert.has_expired(), self.has_expired)
)
def _validate_version():
if self.version:
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
if self.version != self.cert.get_version() + 1:
self.message.append(
'Invalid certificate version number (got %s, expected %s)' % (self.cert.get_version() + 1, self.version)
)
def _validate_keyUsage():
if self.keyUsage:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
keyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.keyUsage]
current_ku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if (not self.keyUsage_strict and not all(x in current_ku for x in keyUsage)) or \
(self.keyUsage_strict and not set(keyUsage) == set(current_ku)):
self.message.append(
'Invalid keyUsage component (got %s, expected all of %s to be present)' % (str(extension).split(', '), self.keyUsage)
)
def _validate_extendedKeyUsage():
if self.extendedKeyUsage:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extendedKeyUsage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if (not self.extendedKeyUsage_strict and not all(x in current_xku for x in extKeyUsage)) or \
(self.extendedKeyUsage_strict and not set(extKeyUsage) == set(current_xku)):
self.message.append(
'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (str(extension).split(', '),
self.extendedKeyUsage)
)
def _validate_subjectAltName():
if self.subjectAltName:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
l_altnames = [altname.replace(b'IP Address', b'IP') for altname in
to_bytes(extension, errors='surrogate_or_strict').split(b', ')]
if (not self.subjectAltName_strict and not all(x in l_altnames for x in self.subjectAltName)) or \
(self.subjectAltName_strict and not set(self.subjectAltName) == set(l_altnames)):
self.message.append(
'Invalid subjectAltName component (got %s, expected all of %s to be present)' % (l_altnames, self.subjectAltName)
)
def _validate_notBefore():
if self.notBefore:
if self.cert.get_notBefore() != self.notBefore:
self.message.append(
'Invalid notBefore component (got %s, expected %s to be present)' % (self.cert.get_notBefore(), self.notBefore)
)
def _validate_notAfter():
if self.notAfter:
if self.cert.get_notAfter() != self.notAfter:
self.message.append(
'Invalid notAfter component (got %s, expected %s to be present)' % (self.cert.get_notAfter(), self.notAfter)
)
def _validate_valid_at():
if self.valid_at:
if not (self.valid_at >= self.cert.get_notBefore() and self.valid_at <= self.cert.get_notAfter()):
self.message.append(
'Certificate is not valid for the specified date (%s) - notBefore: %s - notAfter: %s' % (self.valid_at,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
def _validate_invalid_at():
if self.invalid_at:
if not (self.invalid_at <= self.cert.get_notBefore() or self.invalid_at >= self.cert.get_notAfter()):
self.message.append(
'Certificate is not invalid for the specified date (%s) - notBefore: %s - notAfter: %s' % (self.invalid_at,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
def _validate_valid_in():
if self.valid_in:
valid_in_date = datetime.datetime.utcnow() + datetime.timedelta(seconds=self.valid_in)
valid_in_date = valid_in_date.strftime('%Y%m%d%H%M%SZ')
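                # ASN.1 TIME strings (YYYYMMDDHHMMSSZ) are fixed-width and sort
                # lexically in chronological order, so plain string comparison
                # against get_notBefore()/get_notAfter() is safe here.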
if not (valid_in_date >= self.cert.get_notBefore() and valid_in_date <= self.cert.get_notAfter()):
self.message.append(
'Certificate is not valid in %s seconds from now (%s) - notBefore: %s - notAfter: %s' % (self.valid_in,
valid_in_date,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
for validation in ['signature_algorithms', 'subject', 'issuer',
'has_expired', 'version', 'keyUsage',
'extendedKeyUsage', 'subjectAltName',
'notBefore', 'notAfter', 'valid_at',
'invalid_at', 'valid_in']:
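            # Look up each nested _validate_* closure by name and call it;
            # failures accumulate in self.message instead of raising.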
f_name = locals()['_validate_%s' % validation]
f_name()
def generate(self, module):
"""Don't generate anything - assertonly"""
self.assertonly()
if self.privatekey_path and \
not super(AssertOnlyCertificate, self).check(module, perms_required=False):
self.message.append(
                'Certificate %s and private key %s do not match' % (self.path, self.privatekey_path)
)
if len(self.message):
module.fail_json(msg=' | '.join(self.message))
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
parent_check = super(AssertOnlyCertificate, self).check(module, perms_required)
self.assertonly()
assertonly_check = not len(self.message)
self.message = []
return parent_check and assertonly_check
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
def __init__(self, module):
super(AcmeCertificate, self).__init__(module)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
chain = ''
if self.use_chain:
chain = '--chain'
try:
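                # Shell out to acme-tiny: run_command returns (rc, stdout,
                # stderr) and stdout carries the issued PEM certificate;
                # check_rc=True fails the module on a non-zero exit status.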
crt = module.run_command("%s %s --account-key %s --csr %s "
"--acme-dir %s" % (acme_tiny_path, chain,
self.accountkey_path,
self.csr_path,
self.challenge_path),
check_rc=True)[1]
with open(self.path, 'wb') as certfile:
certfile.write(to_bytes(crt))
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', choices=['present', 'absent'], default='present'),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['selfsigned', 'assertonly', 'acme']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
signature_algorithms=dict(type='list'),
subject=dict(type='dict'),
subject_strict=dict(type='bool', default=False),
issuer=dict(type='dict'),
issuer_strict=dict(type='bool', default=False),
has_expired=dict(type='bool', default=False),
version=dict(type='int'),
keyUsage=dict(type='list', aliases=['key_usage']),
keyUsage_strict=dict(type='bool', default=False, aliases=['key_usage_strict']),
            extendedKeyUsage=dict(type='list', aliases=['extended_key_usage']),
extendedKeyUsage_strict=dict(type='bool', default=False, aliases=['extended_key_usage_strict']),
subjectAltName=dict(type='list', aliases=['subject_alt_name']),
subjectAltName_strict=dict(type='bool', default=False, aliases=['subject_alt_name_strict']),
notBefore=dict(type='str', aliases=['not_before']),
notAfter=dict(type='str', aliases=['not_after']),
valid_at=dict(type='str'),
invalid_at=dict(type='str'),
valid_in=dict(type='int'),
# provider: selfsigned
selfsigned_version=dict(type='int', default='3'),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_notBefore=dict(type='str', aliases=['selfsigned_not_before']),
selfsigned_notAfter=dict(type='str', aliases=['selfsigned_not_after']),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=True),
),
supports_check_mode=True,
add_file_common_args=True,
)
if not pyopenssl_found:
module.fail_json(msg='The python pyOpenSSL library is required')
if module.params['provider'] in ['selfsigned', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
base_dir = os.path.dirname(module.params['path'])
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
            msg='The directory %s does not exist or is not a directory' % base_dir
)
provider = module.params['provider']
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module)
else:
certificate = AssertOnlyCertificate(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
try:
certificate.generate(module)
except CertificateError as exc:
module.fail_json(msg=to_native(exc))
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
try:
certificate.remove()
except CertificateError as exc:
module.fail_json(msg=to_native(exc))
result = certificate.dump()
module.exit_json(**result)
if __name__ == "__main__":
main()
| gpl-3.0 | 2,915,841,326,876,497,000 | 39.745838 | 151 | 0.585749 | false |
tomka/rednotebook | rednotebook/gui/options.py | 1 | 13404 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# Copyright (c) 2009 Jendrik Seipp
#
# RedNotebook is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# RedNotebook is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with RedNotebook; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------
import os
import sys
import logging
import platform
import gtk
import gobject
from rednotebook.gui.customwidgets import UrlButton, CustomComboBoxEntry
from rednotebook.gui.customwidgets import ActionButton
from rednotebook.gui import browser
from rednotebook.util import filesystem, utils
from rednotebook import info
class Option(gtk.HBox):
def __init__(self, text, option_name, tooltip=''):
gtk.HBox.__init__(self)
self.text = text
self.option_name = option_name
self.set_spacing(5)
self.label = gtk.Label(self.text)
self.pack_start(self.label, False, False)
if tooltip:
self.set_tooltip_text(tooltip)
def get_value(self):
raise NotImplementedError
def get_string_value(self):
return str(self.get_value()).strip()
class TickOption(Option):
def __init__(self, text, name, default_value=None, tooltip=''):
Option.__init__(self, '', name, tooltip=tooltip)
self.check_button = gtk.CheckButton(text)
if default_value is None:
self.check_button.set_active(Option.config.read(name, 0) == 1)
else:
self.check_button.set_active(default_value)
self.pack_start(self.check_button, False)
def get_value(self):
return self.check_button.get_active()
def get_string_value(self):
'''
We use 0 and 1 internally for bool options
'''
return int(self.get_value())
class AutostartOption(TickOption):
def __init__(self):
home_dir = os.path.expanduser('~')
autostart_dir = os.path.join(home_dir, '.config/autostart/')
self.autostart_file = os.path.join(autostart_dir, 'rednotebook.desktop')
autostart_file_exists = os.path.exists(self.autostart_file)
TickOption.__init__(self, _('Load RedNotebook at startup'), None, \
default_value=autostart_file_exists)
def get_value(self):
return self.check_button.get_active()
def set(self):
'''Apply the current setting'''
selected = self.get_value()
if selected:
# Add autostart file if it is not present
filesystem.make_file_with_dir(self.autostart_file, info.desktop_file)
else:
# Remove autostart file
if os.path.exists(self.autostart_file):
os.remove(self.autostart_file)
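        # Note: the presence or absence of the .desktop file *is* the setting;
        # nothing is written to the RedNotebook config (see save_options below,
        # which calls set() for options created without an option_name).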
#class TextOption(Option):
# def __init__(self, text, name):
# self.entry = gtk.Entry(30)
# self.entry.set_text(Option.config.read(name, ''))
#
# Option.__init__(self, text, name, self.entry)
#
# def get_value(self):
# return self.entry.get_text()
class CsvTextOption(Option):
def __init__(self, text, option_name, **kwargs):
Option.__init__(self, text, option_name, **kwargs)
# directly read the string, not the list
values_string = Option.config.read(option_name, '')
# Ensure that we have a string here
values_string = str(values_string)
self.entry = gtk.Entry()
self.entry.set_text(values_string)
self.pack_start(self.entry, True)
def get_value(self):
return self.entry.get_text()
#class TextAndButtonOption(TextOption):
# def __init__(self, text, name, button):
# TextOption.__init__(self, text, name)
# self.widget.pack_end(button, False, False)
class ComboBoxOption(Option):
def __init__(self, text, name, entries):
Option.__init__(self, text, name)
self.combo = CustomComboBoxEntry(gtk.ComboBoxEntry())
self.combo.set_entries(entries)
self.pack_start(self.combo.combo_box, False)
def get_value(self):
return self.combo.get_active_text()
class DateFormatOption(ComboBoxOption):
def __init__(self, text, name):
date_formats = ['%A, %x %X', _('%A, %x, Day %j'), '%H:%M', _('Week %W of Year %Y'), \
'%y-%m-%d', _('Day %j'), '%A', '%B']
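        # Illustrative strftime renderings (locale-dependent; an assumed en_US
        # locale is shown -- these are not values used by the code):
        #   '%A, %x %X' -> 'Monday, 01/31/11 14:03:05'
        #   '%y-%m-%d'  -> '11-01-31'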
ComboBoxOption.__init__(self, text, name, date_formats)
date_url = 'http://docs.python.org/library/time.html#time.strftime'
date_format_help_button = UrlButton(_('Help'), date_url)
self.preview = gtk.Label()
self.pack_start(self.preview, False)
self.pack_end(date_format_help_button, False)
# Set default format if not present
format = Option.config.read(name, '%A, %x %X')
self.combo.set_active_text(str(format))
self.combo.connect('changed', self.on_format_changed)
# Update the preview
self.on_format_changed(None)
def on_format_changed(self, widget):
import time
### Translators: Noun
self.preview.set_text(_('Preview:') + ' ' + time.strftime(self.combo.get_active_text()))
class FontSizeOption(ComboBoxOption):
def __init__(self, text, name):
sizes = range(6, 15) + range(16, 29, 2) + [32, 36, 40, 48, 56, 64, 72]
sizes = ['default'] + map(str, sizes)
ComboBoxOption.__init__(self, text, name, sizes)
# Set default size if not present
size = Option.config.read(name, -1)
if size == -1:
self.combo.set_active_text('default')
else:
self.combo.set_active_text(str(size))
self.combo.set_editable(False)
self.combo.combo_box.set_wrap_width(3)
self.combo.connect('changed', self.on_combo_changed)
def on_combo_changed(self, widget):
'''Live update'''
size = self.get_string_value()
Option.main_window.set_font_size(size)
def get_string_value(self):
'''We use 0 and 1 internally for size options'''
size = self.combo.get_active_text()
if size == 'default':
return -1
try:
return int(size)
except ValueError:
return -1
#class SpinOption(LabelAndWidgetOption):
# def __init__(self, text, name):
#
# adj = gtk.Adjustment(10.0, 6.0, 72.0, 1.0, 10.0, 0.0)
# self.spin = gtk.SpinButton(adj)#, climb_rate=1.0)
# self.spin.set_numeric(True)
# self.spin.set_range(6,72)
# self.spin.set_sensitive(True)
# value = Option.config.read(name, -1)
# if value >= 0:
# self.spin.set_value(value)
#
# LabelAndWidgetOption.__init__(self, text, name, self.spin)
#
# def get_value(self):
# print type(self.spin.get_value())
# return self.spin.get_value()
#
# def get_string_value(self):
# value = int(self.get_value())
# return value
class OptionsDialog(object):
def __init__(self, dialog):
self.dialog = dialog
self.categories = {}
def __getattr__(self, attr):
'''Wrap the dialog'''
return getattr(self.dialog, attr)
def add_option(self, category, option):
self.categories[category].pack_start(option, False)
option.show_all()
def add_category(self, name, vbox):
self.categories[name] = vbox
def clear(self):
for category, vbox in self.categories.items():
for option in vbox.get_children():
vbox.remove(option)
class OptionsManager(object):
def __init__(self, main_window):
self.main_window = main_window
self.builder = main_window.builder
self.journal = main_window.journal
self.config = self.journal.config
self.dialog = OptionsDialog(self.builder.get_object('options_dialog'))
self.dialog.set_default_size(600, 300)
self.dialog.add_category('general', self.builder.get_object('general_vbox'))
def on_options_dialog(self):
self.dialog.clear()
# Make the config globally available
Option.config = self.config
Option.main_window = self.main_window
self.options = []
if platform.system() == 'Linux' and os.path.exists('/usr/bin/rednotebook'):
logging.debug('Running on Linux. Is installed. Adding autostart option')
self.options.insert(0, AutostartOption())
self.options.append(TickOption(_('Close to system tray'), 'closeToTray',
tooltip=_('Closing the window will send RedNotebook to the tray')))
able_to_spell_check = self.main_window.day_text_field.can_spell_check()
tooltip = _('Underline misspelled words') if able_to_spell_check else \
_('Requires gtkspell.') + ' ' + \
_('This is included in the python-gtkspell or python-gnome2-extras package')
spell_check_option = TickOption(_('Check Spelling'), 'spellcheck',
tooltip=tooltip)
if not sys.platform == 'win32':
self.options.append(spell_check_option)
spell_check_option.set_sensitive(able_to_spell_check)
#webkit_available = bool(browser.webkit)
#tooltip = _('Webkit offers a nicer preview') if webkit_available else \
# _('Requires pywebkitgtk (python-webkit)')
#webkit_label = _('Use webkit for previews') + ' ' + _('(Restart required)')
#webkit_option = TickOption(webkit_label, 'useWebkit',
# tooltip=tooltip)
#if not sys.platform == 'win32':
#self.options.append(webkit_option)
#webkit_option.set_sensitive(webkit_available)
# Check for new version
check_version_option = TickOption(_('Check for new version at startup'), 'checkForNewVersion')
def check_version_action(widget):
utils.check_new_version(self.main_window, info.version)
# Apply changes from dialog to options window
check = bool(self.journal.config.get('checkForNewVersion'))
check_version_option.check_button.set_active(check)
check_version_button = ActionButton(_('Check now'), check_version_action)
check_version_option.pack_start(check_version_button, False, False)
self.options.append(check_version_option)
self.options.extend([
FontSizeOption(_('Font Size'), 'mainFontSize'),
DateFormatOption(_('Date/Time format'), 'dateTimeString'),
CsvTextOption(_('Exclude from clouds'), 'cloudIgnoreList', \
tooltip=_('Do not show those comma separated words in any cloud')),
CsvTextOption(_('Allow small words in clouds'), 'cloudIncludeList', \
tooltip=_('Allow those words with 4 letters or less in the text cloud')),
])
self.add_all_options()
response = self.dialog.run()
if response == gtk.RESPONSE_OK:
self.save_options()
# Apply some options
self.main_window.cloud.update_lists()
self.main_window.cloud.update(force_update=True)
spell_check_enabled = self.config.read('spellcheck', 0)
self.main_window.day_text_field.enable_spell_check(spell_check_enabled)
visible = (self.config.read('closeToTray', 0) == 1)
self.main_window.tray_icon.set_visible(visible)
else:
# Reset some options
self.main_window.set_font_size(self.config.read('mainFontSize', -1))
self.dialog.hide()
def add_all_options(self):
for option in self.options:
self.dialog.add_option('general', option)
def save_options(self):
logging.debug('Saving Options')
for option in self.options:
value = option.get_string_value()
if option.option_name is not None:
logging.debug('Setting %s = %s' % (option.option_name, value))
self.config[option.option_name] = value
else:
# We don't save the autostart setting in the config file
option.set()
| gpl-2.0 | 562,703,980,913,689,340 | 34.935657 | 105 | 0.571844 | false |
20tab/django-filer | filer/admin/folderadmin.py | 2 | 52396 | #-*- coding: utf-8 -*-
from django import forms
from django import template
from django.core.exceptions import ValidationError
from django.contrib.admin import helpers
from django.contrib.admin.util import quote, unquote, capfirst
from django.contrib import messages
from django.template.defaultfilters import urlencode
from filer.admin.patched.admin_utils import get_deleted_objects
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.db import router
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext, ugettext_lazy
from filer import settings
from filer.admin.forms import (CopyFilesAndFoldersForm, ResizeImagesForm,
RenameFilesForm)
from filer.admin.permissions import PrimitivePermissionAwareModelAdmin
from filer.views import (popup_status, popup_param, selectfolder_status,
selectfolder_param)
from filer.admin.tools import (userperms_for_request,
check_folder_edit_permissions,
check_files_edit_permissions,
check_files_read_permissions,
check_folder_read_permissions)
from filer.models import (Folder, FolderRoot, UnfiledImages, File, tools,
ImagesWithMissingData, FolderPermission, Image)
from filer.settings import FILER_STATICMEDIA_PREFIX, FILER_PAGINATE_BY
from filer.utils.filer_easy_thumbnails import FilerActionThumbnailer
from filer.thumbnail_processors import normalize_subject_location
from django.conf import settings as django_settings
import urllib
import os
import itertools
import inspect
class AddFolderPopupForm(forms.ModelForm):
folder = forms.HiddenInput()
class Meta:
model = Folder
fields = ('name',)
class FolderAdmin(PrimitivePermissionAwareModelAdmin):
list_display = ('name',)
exclude = ('parent',)
list_per_page = 20
list_filter = ('owner',)
search_fields = ['name', 'files__name']
raw_id_fields = ('owner',)
save_as = True # see ImageAdmin
actions = ['move_to_clipboard', 'files_set_public', 'files_set_private',
'delete_files_or_folders', 'move_files_and_folders',
'copy_files_and_folders', 'resize_images', 'rename_files']
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
parent_id = request.REQUEST.get('parent_id', None)
if parent_id:
return AddFolderPopupForm
else:
folder_form = super(FolderAdmin, self).get_form(
request, obj=None, **kwargs)
def folder_form_clean(form_obj):
cleaned_data = form_obj.cleaned_data
if Folder.objects.filter(parent=form_obj.instance.parent,
name=cleaned_data['name']):
raise ValidationError('Folder with this name already exists.')
return cleaned_data
# attach clean to the default form rather than defining a new form class
folder_form.clean = folder_form_clean
return folder_form
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
r = form.save(commit=False)
parent_id = request.REQUEST.get('parent_id', None)
if parent_id:
parent = Folder.objects.get(id=parent_id)
r.parent = parent
return r
def response_change(self, request, obj):
"""
Overrides the default to be able to forward to the directory listing
instead of the default change_list_view
"""
r = super(FolderAdmin, self).response_change(request, obj)
## Code borrowed from django ModelAdmin to determine changelist on the fly
if r['Location']:
# it was a successful save
if (r['Location'] in ['../'] or
r['Location'] == self._get_post_url(obj)):
if obj.parent:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': obj.parent.id})
else:
url = reverse('admin:filer-directory_listing-root')
url = "%s%s%s" % (url,popup_param(request),
selectfolder_param(request,"&"))
return HttpResponseRedirect(url)
else:
# this means it probably was a save_and_continue_editing
pass
return r
def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
extra_context = {'show_delete': True,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),}
context.update(extra_context)
return super(FolderAdmin, self).render_change_form(
request=request, context=context, add=False,
change=False, form_url=form_url, obj=obj)
def delete_view(self, request, object_id, extra_context=None):
"""
Overrides the default to enable redirecting to the directory view after
deletion of a folder.
        We need to fetch the object and find out who the parent is
before super, because super will delete the object and make it
impossible to find out the parent folder to redirect to.
"""
parent_folder = None
try:
obj = self.queryset(request).get(pk=unquote(object_id))
parent_folder = obj.parent
except self.model.DoesNotExist:
obj = None
r = super(FolderAdmin, self).delete_view(
request=request, object_id=object_id,
extra_context=extra_context)
url = r.get("Location", None)
if url in ["../../../../", "../../"] or url == self._get_post_url(obj):
if parent_folder:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': parent_folder.id})
else:
url = reverse('admin:filer-directory_listing-root')
url = "%s%s%s" % (url,popup_param(request),
selectfolder_param(request,"&"))
return HttpResponseRedirect(url)
return r
def icon_img(self, xs):
return mark_safe(('<img src="%simg/icons/plainfolder_32x32.png" ' + \
'alt="Folder Icon" />') % FILER_STATICMEDIA_PREFIX)
icon_img.allow_tags = True
def get_urls(self):
try:
# django >=1.4
from django.conf.urls import patterns, url
except ImportError:
# django <1.4
from django.conf.urls.defaults import patterns, url
urls = super(FolderAdmin, self).get_urls()
from filer import views
url_patterns = patterns('',
# we override the default list view with our own directory listing
# of the root directories
url(r'^$',
self.admin_site.admin_view(self.directory_listing),
name='filer-directory_listing-root'),
url(r'^last/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'last'},
name='filer-directory_listing-last'),
url(r'^(?P<folder_id>\d+)/list/$',
self.admin_site.admin_view(self.directory_listing),
name='filer-directory_listing'),
url(r'^(?P<folder_id>\d+)/make_folder/$',
self.admin_site.admin_view(views.make_folder),
name='filer-directory_listing-make_folder'),
url(r'^make_folder/$',
self.admin_site.admin_view(views.make_folder),
name='filer-directory_listing-make_root_folder'),
url(r'^images_with_missing_data/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'images_with_missing_data'},
name='filer-directory_listing-images_with_missing_data'),
url(r'^unfiled_images/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'unfiled_images'},
name='filer-directory_listing-unfiled_images'),
)
url_patterns.extend(urls)
return url_patterns
# custom views
def directory_listing(self, request, folder_id=None, viewtype=None):
clipboard = tools.get_user_clipboard(request.user)
if viewtype == 'images_with_missing_data':
folder = ImagesWithMissingData()
elif viewtype == 'unfiled_images':
folder = UnfiledImages()
elif viewtype == 'last':
last_folder_id = request.session.get('filer_last_folder_id')
try:
Folder.objects.get(id=last_folder_id)
except Folder.DoesNotExist:
url = reverse('admin:filer-directory_listing-root')
url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request,"&"))
else:
url = reverse('admin:filer-directory_listing', kwargs={'folder_id': last_folder_id})
url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request,"&"))
return HttpResponseRedirect(url)
elif folder_id == None:
folder = FolderRoot()
else:
folder = get_object_or_404(Folder, id=folder_id)
request.session['filer_last_folder_id'] = folder_id
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
# Remove action checkboxes if there aren't any actions available.
list_display = list(self.list_display)
if not actions:
try:
list_display.remove('action_checkbox')
except ValueError:
pass
# search
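        # Matching semantics of the helpers below: successive .filter() calls
        # AND the search terms together, while the Q objects OR the candidate
        # fields within a single term.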
def filter_folder(qs, terms=[]):
for term in terms:
qs = qs.filter(Q(name__icontains=term) | \
Q(owner__username__icontains=term) | \
Q(owner__first_name__icontains=term) | \
Q(owner__last_name__icontains=term))
return qs
def filter_file(qs, terms=[]):
for term in terms:
qs = qs.filter(Q(name__icontains=term) | \
Q(description__icontains=term) | \
Q(original_filename__icontains=term) | \
Q(owner__username__icontains=term) | \
Q(owner__first_name__icontains=term) | \
Q(owner__last_name__icontains=term))
return qs
q = request.GET.get('q', None)
if q:
search_terms = unquote(q).split(" ")
else:
search_terms = []
q = ''
limit_search_to_folder = request.GET.get('limit_search_to_folder',
False) in (True, 'on')
if len(search_terms) > 0:
if folder and limit_search_to_folder and not folder.is_root:
folder_qs = folder.get_descendants()
file_qs = File.objects.filter(
folder__in=folder.get_descendants())
else:
folder_qs = Folder.objects.all()
file_qs = File.objects.all()
folder_qs = filter_folder(folder_qs, search_terms)
file_qs = filter_file(file_qs, search_terms)
show_result_count = True
else:
folder_qs = folder.children.all()
file_qs = folder.files.all()
show_result_count = False
folder_qs = folder_qs.order_by('name')
file_qs = file_qs.order_by('name')
folder_children = []
folder_files = []
if folder.is_root:
folder_children += folder.virtual_folders
for f in folder_qs:
f.perms = userperms_for_request(f, request)
if hasattr(f, 'has_read_permission'):
if f.has_read_permission(request):
folder_children.append(f)
else:
pass
else:
folder_children.append(f)
for f in file_qs:
f.perms = userperms_for_request(f, request)
if hasattr(f, 'has_read_permission'):
if f.has_read_permission(request):
folder_files.append(f)
else:
pass
else:
folder_files.append(f)
try:
permissions = {
'has_edit_permission': folder.has_edit_permission(request),
'has_read_permission': folder.has_read_permission(request),
'has_add_children_permission': \
folder.has_add_children_permission(request),
}
except:
permissions = {}
folder_files.sort()
items = folder_children + folder_files
paginator = Paginator(items, FILER_PAGINATE_BY)
# Are we moving to clipboard?
if request.method == 'POST' and '_save' not in request.POST:
for f in folder_files:
if "move-to-clipboard-%d" % (f.id,) in request.POST:
clipboard = tools.get_user_clipboard(request.user)
if f.has_edit_permission(request):
tools.move_file_to_clipboard([f], clipboard)
return HttpResponseRedirect(request.get_full_path())
else:
raise PermissionDenied
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, files_queryset=file_qs, folders_queryset=folder_qs)
if response:
return response
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg)
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, files_queryset=file_qs, folders_queryset=folder_qs)
if response:
return response
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', paginator.count)
# If page request (9999) is out of range, deliver last page of results.
try:
paginated_items = paginator.page(request.GET.get('page', 1))
except PageNotAnInteger:
paginated_items = paginator.page(1)
except EmptyPage:
paginated_items = paginator.page(paginator.num_pages)
return render_to_response(
'admin/filer/folder/directory_listing.html',
{
'folder': folder,
'clipboard_files': File.objects.filter(
in_clipboards__clipboarditem__clipboard__user=request.user
).distinct(),
'paginator': paginator,
'paginated_items': paginated_items,
'permissions': permissions,
'permstest': userperms_for_request(folder, request),
'current_url': request.path,
'title': u'Directory listing for %s' % folder.name,
'search_string': ' '.join(search_terms),
'q': urlencode(q),
'show_result_count': show_result_count,
'limit_search_to_folder': limit_search_to_folder,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),
# needed in the admin/base.html template for logout links
'root_path': reverse('admin:index'),
'action_form': action_form,
'actions_on_top': self.actions_on_top,
'actions_on_bottom': self.actions_on_bottom,
'actions_selection_counter': self.actions_selection_counter,
'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(paginated_items.object_list)},
'selection_note_all': selection_note_all % {'total_count': paginator.count},
'media': self.media,
'enable_permissions': settings.FILER_ENABLE_PERMISSIONS
}, context_instance=RequestContext(request))
def response_action(self, request, files_queryset, folders_queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
# If we didn't get an action from the chosen form that's invalid
# POST data, so by deleting action it'll fail the validation check
# below. So no need to do anything here
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func, name, description = self.get_actions(request)[action]
# Get the list of selected PKs. If nothing's selected, we can't
# perform an action on it, so bail. Except we want to perform
# the action explicitly on all objects.
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg)
return None
if not select_across:
selected_files = []
selected_folders = []
for pk in selected:
if pk[:5] == "file-":
selected_files.append(pk[5:])
else:
selected_folders.append(pk[7:])
# Perform the action only on the selected objects
files_queryset = files_queryset.filter(pk__in=selected_files)
folders_queryset = folders_queryset.filter(pk__in=selected_folders)
response = func(self, request, files_queryset, folders_queryset)
# Actions may return an HttpResponse, which will be used as the
# response from the POST. If not, we'll be a good little HTTP
# citizen and redirect back to the changelist page.
if isinstance(response, HttpResponse):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg)
return None
def get_actions(self, request):
actions = super(FolderAdmin, self).get_actions(request)
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
def move_to_clipboard(self, request, files_queryset, folders_queryset):
"""
Action which moves the selected files and files in selected folders to clipboard.
"""
if not self.has_change_permission(request):
raise PermissionDenied
if request.method != 'POST':
return None
clipboard = tools.get_user_clipboard(request.user)
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
# TODO: Display a confirmation page if moving more than X files to clipboard?
files_count = [0] # We define it like that so that we can modify it inside the move_files function
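        # (Python 2 has no ``nonlocal``, so a one-element list serves as a
        # mutable cell that the nested helpers below can update in place.)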
def move_files(files):
files_count[0] += tools.move_file_to_clipboard(files, clipboard)
def move_folders(folders):
for f in folders:
move_files(f.files)
move_folders(f.children.all())
move_files(files_queryset)
move_folders(folders_queryset)
self.message_user(request, _("Successfully moved %(count)d files to clipboard.") % {
"count": files_count[0],
})
return None
move_to_clipboard.short_description = ugettext_lazy("Move selected files to clipboard")
def files_set_public_or_private(self, request, set_public, files_queryset, folders_queryset):
"""
        Action which enables or disables permissions for the selected files and
        files in selected folders (sets them private or public).
"""
if not self.has_change_permission(request):
raise PermissionDenied
if request.method != 'POST':
return None
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
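        # Note: in filer a *public* file is served without permission checks,
        # which is why setting files public is labelled "disable permissions"
        # in the action descriptions and messages below.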
files_count = [0] # We define it like that so that we can modify it inside the set_files function
def set_files(files):
for f in files:
if f.is_public != set_public:
f.is_public = set_public
f.save()
files_count[0] += 1
def set_folders(folders):
for f in folders:
set_files(f.files)
set_folders(f.children.all())
set_files(files_queryset)
set_folders(folders_queryset)
if set_public:
self.message_user(request, _("Successfully disabled permissions for %(count)d files.") % {
"count": files_count[0],
})
else:
self.message_user(request, _("Successfully enabled permissions for %(count)d files.") % {
"count": files_count[0],
})
return None
def files_set_private(self, request, files_queryset, folders_queryset):
return self.files_set_public_or_private(request, False, files_queryset, folders_queryset)
files_set_private.short_description = ugettext_lazy("Enable permissions for selected files")
def files_set_public(self, request, files_queryset, folders_queryset):
return self.files_set_public_or_private(request, True, files_queryset, folders_queryset)
files_set_public.short_description = ugettext_lazy("Disable permissions for selected files")
def delete_files_or_folders(self, request, files_queryset, folders_queryset):
"""
Action which deletes the selected files and/or folders.
        This action first displays a confirmation page which shows all the
        deletable files and/or folders, or, if the user has no permission on
        one of the related children (foreign keys), a "permission denied"
        message.
        Next, it deletes all selected files and/or folders and redirects back
        to the folder.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
all_protected = []
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
# Hopefully this also checks for necessary permissions.
# TODO: Check if permissions are really verified
(args, varargs, keywords, defaults) = inspect.getargspec(get_deleted_objects)
if 'levels_to_root' in args:
# Django 1.2
deletable_files, perms_needed_files = get_deleted_objects(files_queryset, files_queryset.model._meta, request.user, self.admin_site, levels_to_root=2)
deletable_folders, perms_needed_folders = get_deleted_objects(folders_queryset, folders_queryset.model._meta, request.user, self.admin_site, levels_to_root=2)
else:
# Django 1.3
using = router.db_for_write(self.model)
deletable_files, perms_needed_files, protected_files = get_deleted_objects(files_queryset, files_queryset.model._meta, request.user, self.admin_site, using)
deletable_folders, perms_needed_folders, protected_folders = get_deleted_objects(folders_queryset, folders_queryset.model._meta, request.user, self.admin_site, using)
all_protected.extend(protected_files)
all_protected.extend(protected_folders)
all_deletable_objects = [deletable_files, deletable_folders]
all_perms_needed = perms_needed_files.union(perms_needed_folders)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if all_perms_needed:
raise PermissionDenied
n = files_queryset.count() + folders_queryset.count()
if n:
# delete all explicitly selected files
for f in files_queryset:
self.log_deletion(request, f, force_unicode(f))
f.delete()
# delete all files in all selected folders and their children
# This would happen automatically by ways of the delete cascade, but then the individual .delete()
# methods won't be called and the files won't be deleted from the filesystem.
folder_ids = set()
for folder in folders_queryset:
folder_ids.add(folder.id)
folder_ids.update(folder.get_descendants().values_list('id', flat=True))
for f in File.objects.filter(folder__in=folder_ids):
self.log_deletion(request, f, force_unicode(f))
f.delete()
# delete all folders
for f in folders_queryset:
self.log_deletion(request, f, force_unicode(f))
f.delete()
self.message_user(request, _("Successfully deleted %(count)d files and/or folders.") % {
"count": n,
})
# Return None to display the change list page again.
return None
if all_perms_needed or all_protected:
title = _("Cannot delete files and/or folders")
else:
title = _("Are you sure?")
context = {
"title": title,
"instance": current_folder,
"breadcrumbs_action": _("Delete files and/or folders"),
"deletable_objects": all_deletable_objects,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": all_perms_needed,
"protected": all_protected,
"opts": opts,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the destination folder selection page
return render_to_response([
"admin/filer/delete_selected_files_confirmation.html"
], context, context_instance=template.RequestContext(request))
delete_files_or_folders.short_description = ugettext_lazy("Delete selected files and/or folders")
# Copied from django.contrib.admin.util
def _format_callback(self, obj, user, admin_site, perms_needed):
has_admin = obj.__class__ in admin_site._registry
opts = obj._meta
if has_admin:
admin_url = reverse('%s:%s_%s_change'
% (admin_site.name,
opts.app_label,
opts.object_name.lower()),
None, (quote(obj._get_pk_val()),))
p = '%s.%s' % (opts.app_label,
opts.get_delete_permission())
if not user.has_perm(p):
perms_needed.add(opts.verbose_name)
# Display a link to the admin page.
return mark_safe(u'%s: <a href="%s">%s</a>' %
(escape(capfirst(opts.verbose_name)),
admin_url,
escape(obj)))
else:
# Don't display link to edit, because it either has no
# admin or is edited inline.
return u'%s: %s' % (capfirst(opts.verbose_name),
force_unicode(obj))
def _check_copy_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
except PermissionDenied:
return True
return False
def _check_move_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
except PermissionDenied:
return True
return False
def _get_current_action_folder(self, request, files_queryset, folders_queryset):
if files_queryset:
return files_queryset[0].folder
elif folders_queryset:
return folders_queryset[0].parent
else:
return None
def _list_folders_to_copy_or_move(self, request, folders):
for fo in folders:
yield self._format_callback(fo, request.user, self.admin_site, set())
children = list(self._list_folders_to_copy_or_move(request, fo.children.all()))
children.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(fo.files)])
if children:
yield children
def _list_all_to_copy_or_move(self, request, files_queryset, folders_queryset):
to_copy_or_move = list(self._list_folders_to_copy_or_move(request, folders_queryset))
to_copy_or_move.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(files_queryset)])
return to_copy_or_move
def _list_all_destination_folders_recursive(self, request, folders_queryset, current_folder, folders, allow_self, level):
for fo in folders:
if not allow_self and fo in folders_queryset:
# We do not allow moving to selected folders or their descendants
continue
if not fo.has_read_permission(request):
continue
# We do not allow copying/moving back to the folder itself
enabled = (allow_self or fo != current_folder) and fo.has_add_children_permission(request)
yield (fo, (mark_safe((" " * level) + force_unicode(fo)), enabled))
for c in self._list_all_destination_folders_recursive(request, folders_queryset, current_folder, fo.children.all(), allow_self, level + 1):
yield c
def _list_all_destination_folders(self, request, folders_queryset, current_folder, allow_self):
return list(self._list_all_destination_folders_recursive(request, folders_queryset, current_folder, FolderRoot().children, allow_self, 0))
def _move_files_and_folders_impl(self, files_queryset, folders_queryset, destination):
for f in files_queryset:
f.folder = destination
f.save()
for f in folders_queryset:
f.move_to(destination, 'last-child')
f.save()
def move_files_and_folders(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_move_perms(request, files_queryset, folders_queryset)
to_move = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
folders = self._list_all_destination_folders(request, folders_queryset, current_folder, False)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
try:
destination = Folder.objects.get(pk=request.POST.get('destination'))
except Folder.DoesNotExist:
raise PermissionDenied
folders_dict = dict(folders)
if destination not in folders_dict or not folders_dict[destination][1]:
raise PermissionDenied
# We count only topmost files and folders here
n = files_queryset.count() + folders_queryset.count()
conflicting_names = [folder.name for folder in Folder.objects.filter(
parent=destination,
name__in=folders_queryset.values('name'))]
if conflicting_names:
messages.error(request, _(u"Folders with names %s already exist at the selected "
"destination") % u", ".join(conflicting_names))
elif n:
self._move_files_and_folders_impl(files_queryset, folders_queryset, destination)
self.message_user(request, _("Successfully moved %(count)d files and/or folders to folder '%(destination)s'.") % {
"count": n,
"destination": destination,
})
return None
context = {
"title": _("Move files and/or folders"),
"instance": current_folder,
"breadcrumbs_action": _("Move files and/or folders"),
"to_move": to_move,
"destination_folders": folders,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the destination folder selection page
return render_to_response([
"admin/filer/folder/choose_move_destination.html"
], context, context_instance=template.RequestContext(request))
move_files_and_folders.short_description = ugettext_lazy("Move selected files and/or folders")
def _rename_file(self, file_obj, form_data, counter, global_counter):
original_basename, original_extension = os.path.splitext(file_obj.original_filename)
if file_obj.name:
current_basename, current_extension = os.path.splitext(file_obj.name)
else:
current_basename = ""
current_extension = ""
file_obj.name = form_data['rename_format'] % {
'original_filename': file_obj.original_filename,
'original_basename': original_basename,
'original_extension': original_extension,
'current_filename': file_obj.name or "",
'current_basename': current_basename,
'current_extension': current_extension,
'current_folder': file_obj.folder.name,
'counter': counter + 1, # 1-based
'global_counter': global_counter + 1, # 1-based
}
file_obj.save()
def _rename_files(self, files, form_data, global_counter):
n = 0
for f in sorted(files):
self._rename_file(f, form_data, n, global_counter + n)
n += 1
return n
def _rename_folder(self, folder, form_data, global_counter):
return self._rename_files_impl(folder.files.all(), folder.children.all(), form_data, global_counter)
def _rename_files_impl(self, files_queryset, folders_queryset, form_data, global_counter):
n = 0
for f in folders_queryset:
n += self._rename_folder(f, form_data, global_counter + n)
n += self._rename_files(files_queryset, form_data, global_counter + n)
return n
def rename_files(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_move_perms(request, files_queryset, folders_queryset)
to_rename = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = RenameFilesForm(request.POST)
if form.is_valid():
if files_queryset.count() + folders_queryset.count():
n = self._rename_files_impl(files_queryset, folders_queryset, form.cleaned_data, 0)
self.message_user(request, _("Successfully renamed %(count)d files.") % {
"count": n,
})
return None
else:
form = RenameFilesForm()
context = {
"title": _("Rename files"),
"instance": current_folder,
"breadcrumbs_action": _("Rename files"),
"to_rename": to_rename,
"rename_form": form,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the rename format selection page
return render_to_response([
"admin/filer/folder/choose_rename_format.html"
], context, context_instance=template.RequestContext(request))
rename_files.short_description = ugettext_lazy("Rename files")
def _generate_new_filename(self, filename, suffix):
basename, extension = os.path.splitext(filename)
return basename + suffix + extension
def _copy_file(self, file_obj, destination, suffix, overwrite):
if overwrite:
# Not yet implemented as we have to find a portable (for different storage backends) way to overwrite files
raise NotImplementedError
        # We assume here that we are operating on already saved database
        # objects with the current database state available
filename = self._generate_new_filename(file_obj.file.name, suffix)
# Due to how inheritance works, we have to set both pk and id to None
file_obj.pk = None
file_obj.id = None
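        # With pk and id cleared, save() issues an INSERT, so file_obj now
        # refers to a brand-new row -- the usual Django idiom for copying a
        # model instance.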
file_obj.save()
file_obj.folder = destination
file_obj.file = file_obj._copy_file(filename)
file_obj.original_filename = self._generate_new_filename(file_obj.original_filename, suffix)
file_obj.save()
def _copy_files(self, files, destination, suffix, overwrite):
for f in files:
self._copy_file(f, destination, suffix, overwrite)
return len(files)
def _get_available_name(self, destination, name):
count = itertools.count(1)
original = name
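        # Keep appending _1, _2, ... until the name is free; e.g. copying
        # "Pictures" into a folder that already holds "Pictures" and
        # "Pictures_1" yields "Pictures_2".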
while destination.contains_folder(name):
name = "%s_%s" % (original, count.next())
return name
def _copy_folder(self, folder, destination, suffix, overwrite):
if overwrite:
# Not yet implemented as we have to find a portable (for different storage backends) way to overwrite files
raise NotImplementedError
# TODO: Should we also allow not to overwrite the folder if it exists, but just copy into it?
# TODO: Is this a race-condition? Would this be a problem?
foldername = self._get_available_name(destination, folder.name)
old_folder = Folder.objects.get(pk=folder.pk)
# Due to how inheritance works, we have to set both pk and id to None
folder.pk = None
folder.id = None
folder.name = foldername
folder.insert_at(destination, 'last-child', True) # We save folder here
for perm in FolderPermission.objects.filter(folder=old_folder):
perm.pk = None
perm.id = None
perm.folder = folder
perm.save()
return 1 + self._copy_files_and_folders_impl(old_folder.files.all(), old_folder.children.all(), folder, suffix, overwrite)
def _copy_files_and_folders_impl(self, files_queryset, folders_queryset, destination, suffix, overwrite):
n = self._copy_files(files_queryset, destination, suffix, overwrite)
for f in folders_queryset:
n += self._copy_folder(f, destination, suffix, overwrite)
return n
def copy_files_and_folders(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_copy_perms(request, files_queryset, folders_queryset)
to_copy = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
folders = self._list_all_destination_folders(request, folders_queryset, current_folder, True)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = CopyFilesAndFoldersForm(request.POST)
if form.is_valid():
try:
destination = Folder.objects.get(pk=request.POST.get('destination'))
except Folder.DoesNotExist:
raise PermissionDenied
folders_dict = dict(folders)
if destination not in folders_dict or not folders_dict[destination][1]:
raise PermissionDenied
if files_queryset.count() + folders_queryset.count():
                    # We count all files and folders here (recursively)
n = self._copy_files_and_folders_impl(files_queryset, folders_queryset, destination, form.cleaned_data['suffix'], False)
self.message_user(request, _("Successfully copied %(count)d files and/or folders to folder '%(destination)s'.") % {
"count": n,
"destination": destination,
})
return None
else:
form = CopyFilesAndFoldersForm()
try:
selected_destination_folder = int(request.POST.get('destination', 0))
except ValueError:
if current_folder:
selected_destination_folder = current_folder.pk
else:
selected_destination_folder = 0
context = {
"title": _("Copy files and/or folders"),
"instance": current_folder,
"breadcrumbs_action": _("Copy files and/or folders"),
"to_copy": to_copy,
"destination_folders": folders,
"selected_destination_folder": selected_destination_folder,
"copy_form": form,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the destination folder selection page
return render_to_response([
"admin/filer/folder/choose_copy_destination.html"
], context, context_instance=template.RequestContext(request))
copy_files_and_folders.short_description = ugettext_lazy("Copy selected files and/or folders")
def _check_resize_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
check_files_edit_permissions(request, files_queryset)
except PermissionDenied:
return True
return False
def _list_folders_to_resize(self, request, folders):
for fo in folders:
children = list(self._list_folders_to_resize(request, fo.children.all()))
children.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(fo.files) if isinstance(f, Image)])
if children:
yield self._format_callback(fo, request.user, self.admin_site, set())
yield children
def _list_all_to_resize(self, request, files_queryset, folders_queryset):
to_resize = list(self._list_folders_to_resize(request, folders_queryset))
to_resize.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(files_queryset) if isinstance(f, Image)])
return to_resize
def _new_subject_location(self, original_width, original_height, new_width, new_height, x, y, crop):
# TODO: We could probably do better
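        # For now the subject is simply recentred: the original (x, y)
        # coordinates and the crop flag are ignored.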
return (round(new_width / 2), round(new_height / 2))
def _resize_image(self, image, form_data):
original_width = float(image.width)
original_height = float(image.height)
thumbnailer = FilerActionThumbnailer(file=image.file.file, name=image.file.name, source_storage=image.file.source_storage, thumbnail_storage=image.file.source_storage)
# This should overwrite the original image
new_image = thumbnailer.get_thumbnail({
'size': (form_data['width'], form_data['height']),
'crop': form_data['crop'],
'upscale': form_data['upscale'],
'subject_location': image.subject_location,
})
from django.db.models.fields.files import ImageFieldFile
image.file.file = new_image.file
image.generate_sha1()
image.save() # Also gets new width and height
subject_location = normalize_subject_location(image.subject_location)
if subject_location:
(x, y) = subject_location
x = float(x)
y = float(y)
new_width = float(image.width)
new_height = float(image.height)
(new_x, new_y) = self._new_subject_location(original_width, original_height, new_width, new_height, x, y, form_data['crop'])
image.subject_location = "%d,%d" % (new_x, new_y)
image.save()
def _resize_images(self, files, form_data):
n = 0
for f in files:
if isinstance(f, Image):
self._resize_image(f, form_data)
n += 1
return n
def _resize_folder(self, folder, form_data):
return self._resize_images_impl(folder.files.all(), folder.children.all(), form_data)
def _resize_images_impl(self, files_queryset, folders_queryset, form_data):
n = self._resize_images(files_queryset, form_data)
for f in folders_queryset:
n += self._resize_folder(f, form_data)
return n
def resize_images(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_resize_perms(request, files_queryset, folders_queryset)
to_resize = self._list_all_to_resize(request, files_queryset, folders_queryset)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = ResizeImagesForm(request.POST)
if form.is_valid():
if form.cleaned_data.get('thumbnail_option'):
form.cleaned_data['width'] = form.cleaned_data['thumbnail_option'].width
form.cleaned_data['height'] = form.cleaned_data['thumbnail_option'].height
form.cleaned_data['crop'] = form.cleaned_data['thumbnail_option'].crop
form.cleaned_data['upscale'] = form.cleaned_data['thumbnail_option'].upscale
if files_queryset.count() + folders_queryset.count():
                    # We count all files here (recursively)
n = self._resize_images_impl(files_queryset, folders_queryset, form.cleaned_data)
self.message_user(request, _("Successfully resized %(count)d images.") % {
"count": n,
})
return None
else:
form = ResizeImagesForm()
context = {
"title": _("Resize images"),
"instance": current_folder,
"breadcrumbs_action": _("Resize images"),
"to_resize": to_resize,
"resize_form": form,
"cmsplugin_enabled": 'cmsplugin_filer_image' in django_settings.INSTALLED_APPS,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the resize options page
return render_to_response([
"admin/filer/folder/choose_images_resize_options.html"
], context, context_instance=template.RequestContext(request))
resize_images.short_description = ugettext_lazy("Resize selected images")
| bsd-3-clause | 5,403,584,838,329,932,000 | 43.706485 | 178 | 0.58701 | false |
hyrole/scrapy | scrapy/settings/deprecated.py | 160 | 1383 | import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
DEPRECATED_SETTINGS = [
('TRACK_REFS', 'no longer needed (trackref is always enabled)'),
('RESPONSE_CLASSES', 'no longer supported'),
('DEFAULT_RESPONSE_ENCODING', 'no longer supported'),
('BOT_VERSION', 'no longer used (user agent defaults to Scrapy now)'),
('ENCODING_ALIASES', 'no longer needed (encoding discovery uses w3lib now)'),
('STATS_ENABLED', 'no longer supported (change STATS_CLASS instead)'),
('SQLITE_DB', 'no longer supported'),
('SELECTORS_BACKEND', 'use SCRAPY_SELECTORS_BACKEND environment variable instead'),
('AUTOTHROTTLE_MIN_DOWNLOAD_DELAY', 'use DOWNLOAD_DELAY instead'),
    ('AUTOTHROTTLE_MAX_CONCURRENCY', 'use CONCURRENT_REQUESTS_PER_DOMAIN instead'),
('REDIRECT_MAX_METAREFRESH_DELAY', 'use METAREFRESH_MAXDELAY instead'),
]
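# A minimal usage sketch for the check defined below (the setting value is
# hypothetical):
#
#     from scrapy.settings import Settings
#     settings = Settings({'SQLITE_DB': 'scrapy.db'})
#     check_deprecated_settings(settings)  # warns: "SQLITE_DB: no longer supported"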
def check_deprecated_settings(settings):
deprecated = [x for x in DEPRECATED_SETTINGS if settings[x[0]] is not None]
if deprecated:
msg = "You are using the following settings which are deprecated or obsolete"
msg += " (ask [email protected] for alternatives):"
msg = msg + "\n " + "\n ".join("%s: %s" % x for x in deprecated)
warnings.warn(msg, ScrapyDeprecationWarning)
| bsd-3-clause | 3,784,398,596,325,435,000 | 52.192308 | 87 | 0.698482 | false |
rschnapka/odoo | addons/document_webdav/__init__.py | 58 | 1119 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import webdav
import webdav_server
import document_webdav
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -497,559,327,671,061,950 | 40.444444 | 78 | 0.627346 | false |
aayushidwivedi01/spark-tk | python/sparktk/frame/ops/quantiles.py | 12 | 2626 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def quantiles(self, column_name, quantiles):
"""
Returns a new frame with Quantiles and their values.
Parameters
----------
:param column_name: (str) The column to calculate quantiles on
:param quantiles: (List[float]) The quantiles being requested
:return: (Frame) A new frame with two columns (float): requested Quantiles and their respective values.
Calculates quantiles on the given column.
Examples
--------
<hide>
>>> data = [[100],[250],[95],[179],[315],[660],[540],[420],[250],[335]]
>>> schema = [('final_sale_price', int)]
>>> my_frame = tc.frame.create(data, schema)
<progress>
</hide>
Consider Frame *my_frame*, which accesses a frame that contains a single
column *final_sale_price*:
>>> my_frame.inspect()
[#] final_sale_price
=====================
[0] 100
[1] 250
[2] 95
[3] 179
[4] 315
[5] 660
[6] 540
[7] 420
[8] 250
[9] 335
To calculate 10th, 50th, and 100th quantile:
>>> quantiles_frame = my_frame.quantiles('final_sale_price', [10, 50, 100])
<progress>
A new Frame containing the requested Quantiles and their respective values
will be returned:
>>> quantiles_frame.inspect()
[#] Quantiles final_sale_price_QuantileValue
==============================================
[0] 10.0 95.0
[1] 50.0 250.0
[2] 100.0 660.0
"""
from sparktk.frame.frame import Frame
return Frame(self._tc, self._scala.quantiles(column_name, self._tc.jutils.convert.to_scala_list_double(quantiles)))
| apache-2.0 | -487,987,058,644,337,900 | 31.666667 | 119 | 0.547881 | false |
Instagram/django | tests/regressiontests/comment_tests/tests/__init__.py | 88 | 3272 | from django.contrib.auth.models import User
from django.contrib.comments.forms import CommentForm
from django.contrib.comments.models import Comment
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.test import TestCase
from regressiontests.comment_tests.models import Article, Author
# Shortcut
CT = ContentType.objects.get_for_model
# Helper base class for comment tests that need data.
class CommentTestCase(TestCase):
fixtures = ["comment_tests"]
urls = 'django.contrib.comments.urls'
def createSomeComments(self):
# Two anonymous comments on two different objects
c1 = Comment.objects.create(
content_type = CT(Article),
object_pk = "1",
user_name = "Joe Somebody",
user_email = "[email protected]",
user_url = "http://example.com/~joe/",
comment = "First!",
site = Site.objects.get_current(),
)
c2 = Comment.objects.create(
content_type = CT(Author),
object_pk = "1",
user_name = "Joe Somebody",
user_email = "[email protected]",
user_url = "http://example.com/~joe/",
comment = "First here, too!",
site = Site.objects.get_current(),
)
# Two authenticated comments: one on the same Article, and
# one on a different Author
user = User.objects.create(
username = "frank_nobody",
first_name = "Frank",
last_name = "Nobody",
email = "[email protected]",
password = "",
is_staff = False,
is_active = True,
is_superuser = False,
)
c3 = Comment.objects.create(
content_type = CT(Article),
object_pk = "1",
user = user,
user_url = "http://example.com/~frank/",
comment = "Damn, I wanted to be first.",
site = Site.objects.get_current(),
)
c4 = Comment.objects.create(
content_type = CT(Author),
object_pk = "2",
user = user,
user_url = "http://example.com/~frank/",
comment = "You get here first, too?",
site = Site.objects.get_current(),
)
return c1, c2, c3, c4
def getData(self):
return {
'name' : 'Jim Bob',
'email' : '[email protected]',
'url' : '',
'comment' : 'This is my comment',
}
def getValidData(self, obj):
f = CommentForm(obj)
d = self.getData()
d.update(f.initial)
return d
from regressiontests.comment_tests.tests.app_api_tests import *
from regressiontests.comment_tests.tests.feed_tests import *
from regressiontests.comment_tests.tests.model_tests import *
from regressiontests.comment_tests.tests.comment_form_tests import *
from regressiontests.comment_tests.tests.templatetag_tests import *
from regressiontests.comment_tests.tests.comment_view_tests import *
from regressiontests.comment_tests.tests.moderation_view_tests import *
from regressiontests.comment_tests.tests.comment_utils_moderators_tests import *
| bsd-3-clause | 1,980,281,448,776,372,200 | 35.355556 | 80 | 0.595049 | false |
bbozhev/flask-test | flask/lib/python2.7/site-packages/werkzeug/testsuite/datastructures.py | 145 | 28212 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.datastructures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests the functionality of the provided Werkzeug
datastructures.
TODO:
- FileMultiDict
- Immutable types undertested
- Split up dict tests
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import unittest
import pickle
from contextlib import contextmanager
from copy import copy, deepcopy
from werkzeug import datastructures
from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \
iterlistvalues, text_type, PY2
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.exceptions import BadRequestKeyError
class NativeItermethodsTestCase(WerkzeugTestCase):
def test_basic(self):
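        # native_itermethods adapts the decorated class per Python version:
        # on Python 2 each listed method is exposed as iter<name> and replaced
        # by a list-returning wrapper; on Python 3 it is a no-op and the
        # methods keep returning iterators.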
@datastructures.native_itermethods(['keys', 'values', 'items'])
class StupidDict(object):
def keys(self, multi=1):
return iter(['a', 'b', 'c'] * multi)
def values(self, multi=1):
return iter([1, 2, 3] * multi)
def items(self, multi=1):
return iter(zip(iterkeys(self, multi=multi),
itervalues(self, multi=multi)))
d = StupidDict()
expected_keys = ['a', 'b', 'c']
expected_values = [1, 2, 3]
expected_items = list(zip(expected_keys, expected_values))
self.assert_equal(list(iterkeys(d)), expected_keys)
self.assert_equal(list(itervalues(d)), expected_values)
self.assert_equal(list(iteritems(d)), expected_items)
self.assert_equal(list(iterkeys(d, 2)), expected_keys * 2)
self.assert_equal(list(itervalues(d, 2)), expected_values * 2)
self.assert_equal(list(iteritems(d, 2)), expected_items * 2)
class MutableMultiDictBaseTestCase(WerkzeugTestCase):
storage_class = None
def test_pickle(self):
cls = self.storage_class
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
d = cls()
d.setlist(b'foo', [1, 2, 3, 4])
d.setlist(b'bar', b'foo bar baz'.split())
s = pickle.dumps(d, protocol)
ud = pickle.loads(s)
self.assert_equal(type(ud), type(d))
self.assert_equal(ud, d)
self.assert_equal(pickle.loads(
s.replace(b'werkzeug.datastructures', b'werkzeug')), d)
ud[b'newkey'] = b'bla'
self.assert_not_equal(ud, d)
def test_basic_interface(self):
md = self.storage_class()
assert isinstance(md, dict)
mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
('a', 1), ('a', 3), ('d', 4), ('c', 3)]
md = self.storage_class(mapping)
# simple getitem gives the first value
self.assert_equal(md['a'], 1)
self.assert_equal(md['c'], 3)
with self.assert_raises(KeyError):
md['e']
self.assert_equal(md.get('a'), 1)
# list getitem
self.assert_equal(md.getlist('a'), [1, 2, 1, 3])
self.assert_equal(md.getlist('d'), [3, 4])
# do not raise if key not found
self.assert_equal(md.getlist('x'), [])
# simple setitem overwrites all values
md['a'] = 42
self.assert_equal(md.getlist('a'), [42])
# list setitem
md.setlist('a', [1, 2, 3])
self.assert_equal(md['a'], 1)
self.assert_equal(md.getlist('a'), [1, 2, 3])
# verify that it does not change original lists
l1 = [1, 2, 3]
md.setlist('a', l1)
del l1[:]
self.assert_equal(md['a'], 1)
# setdefault, setlistdefault
self.assert_equal(md.setdefault('u', 23), 23)
self.assert_equal(md.getlist('u'), [23])
del md['u']
md.setlist('u', [-1, -2])
# delitem
del md['u']
with self.assert_raises(KeyError):
md['u']
del md['d']
self.assert_equal(md.getlist('d'), [])
# keys, values, items, lists
self.assert_equal(list(sorted(md.keys())), ['a', 'b', 'c'])
self.assert_equal(list(sorted(iterkeys(md))), ['a', 'b', 'c'])
self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3])
self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3])
self.assert_equal(list(sorted(md.items())),
[('a', 1), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(md.items(multi=True))),
[('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(iteritems(md))),
[('a', 1), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(iteritems(md, multi=True))),
[('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(md.lists())),
[('a', [1, 2, 3]), ('b', [2]), ('c', [3])])
self.assert_equal(list(sorted(iterlists(md))),
[('a', [1, 2, 3]), ('b', [2]), ('c', [3])])
# copy method
c = md.copy()
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# copy method 2
c = copy(md)
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# deepcopy method
c = md.deepcopy()
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# deepcopy method 2
c = deepcopy(md)
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# update with a multidict
od = self.storage_class([('a', 4), ('a', 5), ('y', 0)])
md.update(od)
self.assert_equal(md.getlist('a'), [1, 2, 3, 4, 5])
self.assert_equal(md.getlist('y'), [0])
# update with a regular dict
md = c
od = {'a': 4, 'y': 0}
md.update(od)
self.assert_equal(md.getlist('a'), [1, 2, 3, 4])
self.assert_equal(md.getlist('y'), [0])
# pop, poplist, popitem, popitemlist
self.assert_equal(md.pop('y'), 0)
assert 'y' not in md
self.assert_equal(md.poplist('a'), [1, 2, 3, 4])
assert 'a' not in md
self.assert_equal(md.poplist('missing'), [])
# remaining: b=2, c=3
popped = md.popitem()
assert popped in [('b', 2), ('c', 3)]
popped = md.popitemlist()
assert popped in [('b', [2]), ('c', [3])]
# type conversion
md = self.storage_class({'a': '4', 'b': ['2', '3']})
self.assert_equal(md.get('a', type=int), 4)
self.assert_equal(md.getlist('b', type=int), [2, 3])
# repr
md = self.storage_class([('a', 1), ('a', 2), ('b', 3)])
assert "('a', 1)" in repr(md)
assert "('a', 2)" in repr(md)
assert "('b', 3)" in repr(md)
# add and getlist
md.add('c', '42')
md.add('c', '23')
self.assert_equal(md.getlist('c'), ['42', '23'])
md.add('c', 'blah')
self.assert_equal(md.getlist('c', type=int), [42, 23])
# setdefault
md = self.storage_class()
md.setdefault('x', []).append(42)
md.setdefault('x', []).append(23)
self.assert_equal(md['x'], [42, 23])
# to dict
md = self.storage_class()
md['foo'] = 42
md.add('bar', 1)
md.add('bar', 2)
self.assert_equal(md.to_dict(), {'foo': 42, 'bar': 1})
self.assert_equal(md.to_dict(flat=False), {'foo': [42], 'bar': [1, 2]})
# popitem from empty dict
with self.assert_raises(KeyError):
self.storage_class().popitem()
with self.assert_raises(KeyError):
self.storage_class().popitemlist()
# key errors are of a special type
with self.assert_raises(BadRequestKeyError):
self.storage_class()[42]
# setlist works
md = self.storage_class()
md['foo'] = 42
md.setlist('foo', [1, 2])
self.assert_equal(md.getlist('foo'), [1, 2])
class ImmutableDictBaseTestCase(WerkzeugTestCase):
storage_class = None
def test_follows_dict_interface(self):
cls = self.storage_class
data = {'foo': 1, 'bar': 2, 'baz': 3}
d = cls(data)
self.assert_equal(d['foo'], 1)
self.assert_equal(d['bar'], 2)
self.assert_equal(d['baz'], 3)
self.assert_equal(sorted(d.keys()), ['bar', 'baz', 'foo'])
self.assert_true('foo' in d)
self.assert_true('foox' not in d)
self.assert_equal(len(d), 3)
def test_copies_are_mutable(self):
cls = self.storage_class
immutable = cls({'a': 1})
with self.assert_raises(TypeError):
immutable.pop('a')
mutable = immutable.copy()
mutable.pop('a')
self.assert_true('a' in immutable)
self.assert_true(mutable is not immutable)
self.assert_true(copy(immutable) is immutable)
def test_dict_is_hashable(self):
cls = self.storage_class
immutable = cls({'a': 1, 'b': 2})
immutable2 = cls({'a': 2, 'b': 2})
x = set([immutable])
self.assert_true(immutable in x)
self.assert_true(immutable2 not in x)
x.discard(immutable)
self.assert_true(immutable not in x)
self.assert_true(immutable2 not in x)
x.add(immutable2)
self.assert_true(immutable not in x)
self.assert_true(immutable2 in x)
x.add(immutable)
self.assert_true(immutable in x)
self.assert_true(immutable2 in x)
class ImmutableTypeConversionDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableTypeConversionDict
class ImmutableMultiDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableMultiDict
def test_multidict_is_hashable(self):
cls = self.storage_class
immutable = cls({'a': [1, 2], 'b': 2})
immutable2 = cls({'a': [1], 'b': 2})
x = set([immutable])
self.assert_true(immutable in x)
self.assert_true(immutable2 not in x)
x.discard(immutable)
self.assert_true(immutable not in x)
self.assert_true(immutable2 not in x)
x.add(immutable2)
self.assert_true(immutable not in x)
self.assert_true(immutable2 in x)
x.add(immutable)
self.assert_true(immutable in x)
self.assert_true(immutable2 in x)
class ImmutableDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableDict
class ImmutableOrderedMultiDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableOrderedMultiDict
def test_ordered_multidict_is_hashable(self):
a = self.storage_class([('a', 1), ('b', 1), ('a', 2)])
b = self.storage_class([('a', 1), ('a', 2), ('b', 1)])
self.assert_not_equal(hash(a), hash(b))
class MultiDictTestCase(MutableMultiDictBaseTestCase):
storage_class = datastructures.MultiDict
def test_multidict_pop(self):
make_d = lambda: self.storage_class({'foo': [1, 2, 3, 4]})
d = make_d()
self.assert_equal(d.pop('foo'), 1)
assert not d
d = make_d()
self.assert_equal(d.pop('foo', 32), 1)
assert not d
d = make_d()
self.assert_equal(d.pop('foos', 32), 32)
assert d
with self.assert_raises(KeyError):
d.pop('foos')
def test_setlistdefault(self):
md = self.storage_class()
self.assert_equal(md.setlistdefault('u', [-1, -2]), [-1, -2])
self.assert_equal(md.getlist('u'), [-1, -2])
self.assert_equal(md['u'], -1)
def test_iter_interfaces(self):
mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
('a', 1), ('a', 3), ('d', 4), ('c', 3)]
md = self.storage_class(mapping)
self.assert_equal(list(zip(md.keys(), md.listvalues())),
list(md.lists()))
self.assert_equal(list(zip(md, iterlistvalues(md))),
list(iterlists(md)))
self.assert_equal(list(zip(iterkeys(md), iterlistvalues(md))),
list(iterlists(md)))
class OrderedMultiDictTestCase(MutableMultiDictBaseTestCase):
storage_class = datastructures.OrderedMultiDict
def test_ordered_interface(self):
cls = self.storage_class
d = cls()
assert not d
d.add('foo', 'bar')
self.assert_equal(len(d), 1)
d.add('foo', 'baz')
self.assert_equal(len(d), 1)
self.assert_equal(list(iteritems(d)), [('foo', 'bar')])
self.assert_equal(list(d), ['foo'])
self.assert_equal(list(iteritems(d, multi=True)),
[('foo', 'bar'), ('foo', 'baz')])
del d['foo']
assert not d
self.assert_equal(len(d), 0)
self.assert_equal(list(d), [])
d.update([('foo', 1), ('foo', 2), ('bar', 42)])
d.add('foo', 3)
self.assert_equal(d.getlist('foo'), [1, 2, 3])
self.assert_equal(d.getlist('bar'), [42])
self.assert_equal(list(iteritems(d)), [('foo', 1), ('bar', 42)])
expected = ['foo', 'bar']
self.assert_sequence_equal(list(d.keys()), expected)
self.assert_sequence_equal(list(d), expected)
self.assert_sequence_equal(list(iterkeys(d)), expected)
self.assert_equal(list(iteritems(d, multi=True)),
[('foo', 1), ('foo', 2), ('bar', 42), ('foo', 3)])
self.assert_equal(len(d), 2)
self.assert_equal(d.pop('foo'), 1)
assert d.pop('blafasel', None) is None
self.assert_equal(d.pop('blafasel', 42), 42)
self.assert_equal(len(d), 1)
self.assert_equal(d.poplist('bar'), [42])
assert not d
d.get('missingkey') is None
d.add('foo', 42)
d.add('foo', 23)
d.add('bar', 2)
d.add('foo', 42)
self.assert_equal(d, datastructures.MultiDict(d))
id = self.storage_class(d)
self.assert_equal(d, id)
d.add('foo', 2)
assert d != id
d.update({'blah': [1, 2, 3]})
self.assert_equal(d['blah'], 1)
self.assert_equal(d.getlist('blah'), [1, 2, 3])
# setlist works
d = self.storage_class()
d['foo'] = 42
d.setlist('foo', [1, 2])
self.assert_equal(d.getlist('foo'), [1, 2])
with self.assert_raises(BadRequestKeyError):
d.pop('missing')
with self.assert_raises(BadRequestKeyError):
d['missing']
# popping
d = self.storage_class()
d.add('foo', 23)
d.add('foo', 42)
d.add('foo', 1)
self.assert_equal(d.popitem(), ('foo', 23))
with self.assert_raises(BadRequestKeyError):
d.popitem()
assert not d
d.add('foo', 23)
d.add('foo', 42)
d.add('foo', 1)
self.assert_equal(d.popitemlist(), ('foo', [23, 42, 1]))
with self.assert_raises(BadRequestKeyError):
d.popitemlist()
def test_iterables(self):
a = datastructures.MultiDict((("key_a", "value_a"),))
b = datastructures.MultiDict((("key_b", "value_b"),))
        ab = datastructures.CombinedMultiDict((a, b))
self.assert_equal(sorted(ab.lists()), [('key_a', ['value_a']), ('key_b', ['value_b'])])
self.assert_equal(sorted(ab.listvalues()), [['value_a'], ['value_b']])
self.assert_equal(sorted(ab.keys()), ["key_a", "key_b"])
self.assert_equal(sorted(iterlists(ab)), [('key_a', ['value_a']), ('key_b', ['value_b'])])
self.assert_equal(sorted(iterlistvalues(ab)), [['value_a'], ['value_b']])
self.assert_equal(sorted(iterkeys(ab)), ["key_a", "key_b"])
class CombinedMultiDictTestCase(WerkzeugTestCase):
storage_class = datastructures.CombinedMultiDict
def test_basic_interface(self):
d1 = datastructures.MultiDict([('foo', '1')])
d2 = datastructures.MultiDict([('bar', '2'), ('bar', '3')])
d = self.storage_class([d1, d2])
# lookup
self.assert_equal(d['foo'], '1')
self.assert_equal(d['bar'], '2')
self.assert_equal(d.getlist('bar'), ['2', '3'])
self.assert_equal(sorted(d.items()),
[('bar', '2'), ('foo', '1')])
self.assert_equal(sorted(d.items(multi=True)),
[('bar', '2'), ('bar', '3'), ('foo', '1')])
assert 'missingkey' not in d
assert 'foo' in d
# type lookup
self.assert_equal(d.get('foo', type=int), 1)
self.assert_equal(d.getlist('bar', type=int), [2, 3])
# get key errors for missing stuff
with self.assert_raises(KeyError):
d['missing']
# make sure that they are immutable
with self.assert_raises(TypeError):
d['foo'] = 'blub'
# copies are immutable
d = d.copy()
with self.assert_raises(TypeError):
d['foo'] = 'blub'
# make sure lists merges
md1 = datastructures.MultiDict((("foo", "bar"),))
md2 = datastructures.MultiDict((("foo", "blafasel"),))
x = self.storage_class((md1, md2))
self.assert_equal(list(iterlists(x)), [('foo', ['bar', 'blafasel'])])
class HeadersTestCase(WerkzeugTestCase):
storage_class = datastructures.Headers
def test_basic_interface(self):
headers = self.storage_class()
headers.add('Content-Type', 'text/plain')
headers.add('X-Foo', 'bar')
assert 'x-Foo' in headers
assert 'Content-type' in headers
headers['Content-Type'] = 'foo/bar'
self.assert_equal(headers['Content-Type'], 'foo/bar')
self.assert_equal(len(headers.getlist('Content-Type')), 1)
# list conversion
self.assert_equal(headers.to_wsgi_list(), [
('Content-Type', 'foo/bar'),
('X-Foo', 'bar')
])
self.assert_equal(str(headers), (
"Content-Type: foo/bar\r\n"
"X-Foo: bar\r\n"
"\r\n"))
self.assert_equal(str(self.storage_class()), "\r\n")
# extended add
headers.add('Content-Disposition', 'attachment', filename='foo')
self.assert_equal(headers['Content-Disposition'],
'attachment; filename=foo')
headers.add('x', 'y', z='"')
self.assert_equal(headers['x'], r'y; z="\""')
def test_defaults_and_conversion(self):
# defaults
headers = self.storage_class([
('Content-Type', 'text/plain'),
('X-Foo', 'bar'),
('X-Bar', '1'),
('X-Bar', '2')
])
self.assert_equal(headers.getlist('x-bar'), ['1', '2'])
self.assert_equal(headers.get('x-Bar'), '1')
self.assert_equal(headers.get('Content-Type'), 'text/plain')
self.assert_equal(headers.setdefault('X-Foo', 'nope'), 'bar')
self.assert_equal(headers.setdefault('X-Bar', 'nope'), '1')
self.assert_equal(headers.setdefault('X-Baz', 'quux'), 'quux')
self.assert_equal(headers.setdefault('X-Baz', 'nope'), 'quux')
headers.pop('X-Baz')
# type conversion
self.assert_equal(headers.get('x-bar', type=int), 1)
self.assert_equal(headers.getlist('x-bar', type=int), [1, 2])
# list like operations
self.assert_equal(headers[0], ('Content-Type', 'text/plain'))
self.assert_equal(headers[:1], self.storage_class([('Content-Type', 'text/plain')]))
del headers[:2]
del headers[-1]
self.assert_equal(headers, self.storage_class([('X-Bar', '1')]))
def test_copying(self):
a = self.storage_class([('foo', 'bar')])
b = a.copy()
a.add('foo', 'baz')
self.assert_equal(a.getlist('foo'), ['bar', 'baz'])
self.assert_equal(b.getlist('foo'), ['bar'])
def test_popping(self):
headers = self.storage_class([('a', 1)])
self.assert_equal(headers.pop('a'), 1)
self.assert_equal(headers.pop('b', 2), 2)
with self.assert_raises(KeyError):
headers.pop('c')
def test_set_arguments(self):
a = self.storage_class()
a.set('Content-Disposition', 'useless')
a.set('Content-Disposition', 'attachment', filename='foo')
self.assert_equal(a['Content-Disposition'], 'attachment; filename=foo')
def test_reject_newlines(self):
h = self.storage_class()
for variation in 'foo\nbar', 'foo\r\nbar', 'foo\rbar':
with self.assert_raises(ValueError):
h['foo'] = variation
with self.assert_raises(ValueError):
h.add('foo', variation)
with self.assert_raises(ValueError):
h.add('foo', 'test', option=variation)
with self.assert_raises(ValueError):
h.set('foo', variation)
with self.assert_raises(ValueError):
h.set('foo', 'test', option=variation)
def test_slicing(self):
# there's nothing wrong with these being native strings
# Headers doesn't care about the data types
h = self.storage_class()
h.set('X-Foo-Poo', 'bleh')
h.set('Content-Type', 'application/whocares')
h.set('X-Forwarded-For', '192.168.0.123')
h[:] = [(k, v) for k, v in h if k.startswith(u'X-')]
self.assert_equal(list(h), [
('X-Foo-Poo', 'bleh'),
('X-Forwarded-For', '192.168.0.123')
])
def test_bytes_operations(self):
h = self.storage_class()
h.set('X-Foo-Poo', 'bleh')
h.set('X-Whoops', b'\xff')
self.assert_equal(h.get('x-foo-poo', as_bytes=True), b'bleh')
self.assert_equal(h.get('x-whoops', as_bytes=True), b'\xff')
def test_to_wsgi_list(self):
h = self.storage_class()
h.set(u'Key', u'Value')
for key, value in h.to_wsgi_list():
if PY2:
self.assert_strict_equal(key, b'Key')
self.assert_strict_equal(value, b'Value')
else:
self.assert_strict_equal(key, u'Key')
self.assert_strict_equal(value, u'Value')
class EnvironHeadersTestCase(WerkzeugTestCase):
storage_class = datastructures.EnvironHeaders
def test_basic_interface(self):
        # this happens in multiple WSGI servers because they
        # use a very naive way to convert the headers
broken_env = {
'HTTP_CONTENT_TYPE': 'text/html',
'CONTENT_TYPE': 'text/html',
'HTTP_CONTENT_LENGTH': '0',
'CONTENT_LENGTH': '0',
'HTTP_ACCEPT': '*',
'wsgi.version': (1, 0)
}
headers = self.storage_class(broken_env)
assert headers
self.assert_equal(len(headers), 3)
self.assert_equal(sorted(headers), [
('Accept', '*'),
('Content-Length', '0'),
('Content-Type', 'text/html')
])
assert not self.storage_class({'wsgi.version': (1, 0)})
self.assert_equal(len(self.storage_class({'wsgi.version': (1, 0)})), 0)
def test_return_type_is_unicode(self):
# environ contains native strings; we return unicode
headers = self.storage_class({
'HTTP_FOO': '\xe2\x9c\x93',
'CONTENT_TYPE': 'text/plain',
})
self.assert_equal(headers['Foo'], u"\xe2\x9c\x93")
assert isinstance(headers['Foo'], text_type)
assert isinstance(headers['Content-Type'], text_type)
iter_output = dict(iter(headers))
self.assert_equal(iter_output['Foo'], u"\xe2\x9c\x93")
assert isinstance(iter_output['Foo'], text_type)
assert isinstance(iter_output['Content-Type'], text_type)
def test_bytes_operations(self):
foo_val = '\xff'
h = self.storage_class({
'HTTP_X_FOO': foo_val
})
self.assert_equal(h.get('x-foo', as_bytes=True), b'\xff')
self.assert_equal(h.get('x-foo'), u'\xff')
class HeaderSetTestCase(WerkzeugTestCase):
storage_class = datastructures.HeaderSet
def test_basic_interface(self):
hs = self.storage_class()
hs.add('foo')
hs.add('bar')
assert 'Bar' in hs
self.assert_equal(hs.find('foo'), 0)
self.assert_equal(hs.find('BAR'), 1)
assert hs.find('baz') < 0
hs.discard('missing')
hs.discard('foo')
assert hs.find('foo') < 0
self.assert_equal(hs.find('bar'), 0)
with self.assert_raises(IndexError):
hs.index('missing')
self.assert_equal(hs.index('bar'), 0)
assert hs
hs.clear()
assert not hs
class ImmutableListTestCase(WerkzeugTestCase):
storage_class = datastructures.ImmutableList
def test_list_hashable(self):
t = (1, 2, 3, 4)
l = self.storage_class(t)
self.assert_equal(hash(t), hash(l))
self.assert_not_equal(t, l)
def make_call_asserter(assert_equal_func, func=None):
"""Utility to assert a certain number of function calls.
    >>> assert_calls, func = make_call_asserter(self.assert_equal)
    >>> with assert_calls(2):
    ...     func()
    ...     func()
"""
calls = [0]
@contextmanager
def asserter(count, msg=None):
calls[0] = 0
yield
assert_equal_func(calls[0], count, msg)
def wrapped(*args, **kwargs):
calls[0] += 1
if func is not None:
return func(*args, **kwargs)
return asserter, wrapped
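# Illustrative use of make_call_asserter outside a test class (a sketch, not
# part of the original suite; the `eq` helper below is a hypothetical
# assert-based stand-in for a test method's assert_equal):
#
#     def eq(a, b, msg=None):
#         assert a == b, msg
#
#     assert_calls, counted = make_call_asserter(eq)
#     with assert_calls(2, 'expected exactly two calls'):
#         counted()
#         counted()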
class CallbackDictTestCase(WerkzeugTestCase):
storage_class = datastructures.CallbackDict
def test_callback_dict_reads(self):
assert_calls, func = make_call_asserter(self.assert_equal)
initial = {'a': 'foo', 'b': 'bar'}
dct = self.storage_class(initial=initial, on_update=func)
with assert_calls(0, 'callback triggered by read-only method'):
# read-only methods
dct['a']
dct.get('a')
self.assert_raises(KeyError, lambda: dct['x'])
'a' in dct
list(iter(dct))
dct.copy()
with assert_calls(0, 'callback triggered without modification'):
# methods that may write but don't
dct.pop('z', None)
dct.setdefault('a')
def test_callback_dict_writes(self):
assert_calls, func = make_call_asserter(self.assert_equal)
initial = {'a': 'foo', 'b': 'bar'}
dct = self.storage_class(initial=initial, on_update=func)
with assert_calls(8, 'callback not triggered by write method'):
# always-write methods
dct['z'] = 123
dct['z'] = 123 # must trigger again
del dct['z']
dct.pop('b', None)
dct.setdefault('x')
dct.popitem()
dct.update([])
dct.clear()
with assert_calls(0, 'callback triggered by failed del'):
self.assert_raises(KeyError, lambda: dct.__delitem__('x'))
with assert_calls(0, 'callback triggered by failed pop'):
self.assert_raises(KeyError, lambda: dct.pop('x'))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(MultiDictTestCase))
suite.addTest(unittest.makeSuite(OrderedMultiDictTestCase))
suite.addTest(unittest.makeSuite(CombinedMultiDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableTypeConversionDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableMultiDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableOrderedMultiDictTestCase))
suite.addTest(unittest.makeSuite(HeadersTestCase))
suite.addTest(unittest.makeSuite(EnvironHeadersTestCase))
suite.addTest(unittest.makeSuite(HeaderSetTestCase))
suite.addTest(unittest.makeSuite(NativeItermethodsTestCase))
suite.addTest(unittest.makeSuite(CallbackDictTestCase))
return suite
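# Illustrative invocation of the suite above (a sketch assuming the standard
# library runner; the project normally drives this through its own test entry
# point):
#
#     unittest.TextTestRunner(verbosity=2).run(suite())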
| mit | 8,503,196,819,380,128,000 | 33.82963 | 98 | 0.549057 | false |
mgit-at/ansible | lib/ansible/modules/net_tools/nios/nios_dns_view.py | 68 | 4192 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_dns_view
version_added: "2.5"
author: "Peter Sprygada (@privateip)"
short_description: Configure Infoblox NIOS DNS views
description:
- Adds and/or removes instances of DNS view objects from
Infoblox NIOS servers. This module manages NIOS C(view) objects
using the Infoblox WAPI interface over REST.
- Updates instances of DNS view object from Infoblox NIOS servers.
requirements:
- infoblox-client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system. User can also update the hostname as it is possible
to pass a dict containing I(new_name), I(old_name). See examples.
required: true
aliases:
- view
network_view:
description:
- Specifies the name of the network view to assign the configured
DNS view to. The network view must already be configured on the
target system.
required: true
default: default
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
required: false
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
required: false
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
required: false
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure a new dns view instance
nios_dns_view:
name: ansible-dns
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the comment for dns view
nios_dns_view:
name: ansible-dns
comment: this is an example comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove the dns view instance
nios_dns_view:
name: ansible-dns
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the dns view instance
nios_dns_view:
name: {new_name: ansible-dns-new, old_name: ansible-dns}
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.net_tools.nios.api import WapiModule
from ansible.module_utils.net_tools.nios.api import NIOS_DNS_VIEW
def main():
''' Main entry point for module execution
'''
ib_spec = dict(
name=dict(required=True, aliases=['view'], ib_req=True),
network_view=dict(default='default', ib_req=True),
extattrs=dict(type='dict'),
comment=dict()
)
argument_spec = dict(
provider=dict(required=True),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ib_spec)
argument_spec.update(WapiModule.provider_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
wapi = WapiModule(module)
result = wapi.run(NIOS_DNS_VIEW, ib_spec)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,896,149,329,633,127,000 | 28.111111 | 92 | 0.662691 | false |
tareqalayan/ansible | lib/ansible/modules/network/f5/bigip_monitor_tcp.py | 17 | 17981 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_tcp
short_description: Manages F5 BIG-IP LTM tcp monitors
description: Manages F5 BIG-IP LTM tcp monitors via iControl SOAP API.
version_added: 1.4
options:
name:
description:
- Monitor name.
required: True
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(tcp)
parent on the C(Common) partition.
default: /Common/tcp
send:
description:
- The send string for the monitor call.
receive:
description:
- The receive string for the monitor call.
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
- If this value is an IP address, and the C(type) is C(tcp) (the default),
then a C(port) number must be specified.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
        must be specified.
- This argument is not supported for TCP Echo types.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create TCP Monitor
bigip_monitor_tcp:
state: present
server: lb.mydomain.com
user: admin
password: secret
name: my_tcp_monitor
type: tcp
send: tcp string to send
receive: tcp string to receive
delegate_to: localhost
- name: Remove TCP Monitor
bigip_monitor_tcp:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_tcp_monitor
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: tcp
send:
description: The new send string for this monitor.
returned: changed
type: string
sample: tcp string to send
receive:
description: The new receive string for this monitor.
returned: changed
type: string
sample: tcp string to receive
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
port:
description: The new port of IP/port definition.
returned: changed
type: string
sample: [email protected]
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive'
}
api_attributes = [
'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'recv', 'send',
'destination'
]
returnables = [
'parent', 'send', 'receive', 'ip', 'port', 'interval', 'timeout',
'time_until_up'
]
updatables = [
'destination', 'send', 'receive', 'interval', 'timeout', 'time_until_up'
]
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
except Exception:
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
@property
def interval(self):
if self._values['interval'] is None:
return None
        if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
try:
if self._values['ip'] in ['*', '0.0.0.0']:
return '*'
result = str(netaddr.IPAddress(self._values['ip']))
return result
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def type(self):
return 'tcp'
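# Worked example of the destination round-trip implemented above (illustrative
# values): with ip='10.12.13.14' and port=80 the computed 'destination'
# property is '10.12.13.14:80', and assigning that string back through the
# setter splits it into ip='10.12.13.14' and port='80' again.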
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
changed[k] = change
if changed:
self.changes = Changes(params=changed)
return True
return False
def _announce_deprecations(self):
warnings = []
if self.want:
warnings += self.want._values.get('__warnings', [])
if self.have:
warnings += self.have._values.get('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations()
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
self._set_default_creation_values()
if self.module.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def _set_default_creation_values(self):
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.ltm.monitor.tcps.tcp.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.changes.api_params()
result = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.tcps.tcp.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/tcp'),
send=dict(),
receive=dict(),
ip=dict(),
port=dict(type='int'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
if not HAS_NETADDR:
module.fail_json(msg="The python netaddr module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | -2,863,581,354,208,242,700 | 29.789384 | 92 | 0.601746 | false |
erikdejonge/youtube-dl | youtube_dl/extractor/ccma.py | 19 | 3704 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
int_or_none,
parse_duration,
parse_iso8601,
parse_resolution,
url_or_none,
)
class CCMAIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?ccma\.cat/(?:[^/]+/)*?(?P<type>video|audio)/(?P<id>\d+)'
_TESTS = [{
'url': 'http://www.ccma.cat/tv3/alacarta/lespot-de-la-marato-de-tv3/lespot-de-la-marato-de-tv3/video/5630208/',
'md5': '7296ca43977c8ea4469e719c609b0871',
'info_dict': {
'id': '5630208',
'ext': 'mp4',
'title': 'L\'espot de La Marató de TV3',
'description': 'md5:f12987f320e2f6e988e9908e4fe97765',
'timestamp': 1470918540,
'upload_date': '20160811',
}
}, {
'url': 'http://www.ccma.cat/catradio/alacarta/programa/el-consell-de-savis-analitza-el-derbi/audio/943685/',
'md5': 'fa3e38f269329a278271276330261425',
'info_dict': {
'id': '943685',
'ext': 'mp3',
'title': 'El Consell de Savis analitza el derbi',
'description': 'md5:e2a3648145f3241cb9c6b4b624033e53',
'upload_date': '20171205',
'timestamp': 1512507300,
}
}]
def _real_extract(self, url):
media_type, media_id = re.match(self._VALID_URL, url).groups()
media = self._download_json(
'http://dinamics.ccma.cat/pvideo/media.jsp', media_id, query={
'media': media_type,
'idint': media_id,
})
formats = []
media_url = media['media']['url']
if isinstance(media_url, list):
for format_ in media_url:
format_url = url_or_none(format_.get('file'))
if not format_url:
continue
label = format_.get('label')
f = parse_resolution(label)
f.update({
'url': format_url,
'format_id': label,
})
formats.append(f)
else:
formats.append({
'url': media_url,
'vcodec': 'none' if media_type == 'audio' else None,
})
self._sort_formats(formats)
informacio = media['informacio']
title = informacio['titol']
durada = informacio.get('durada', {})
duration = int_or_none(durada.get('milisegons'), 1000) or parse_duration(durada.get('text'))
timestamp = parse_iso8601(informacio.get('data_emissio', {}).get('utc'))
subtitles = {}
subtitols = media.get('subtitols', {})
if subtitols:
sub_url = subtitols.get('url')
if sub_url:
subtitles.setdefault(
subtitols.get('iso') or subtitols.get('text') or 'ca', []).append({
'url': sub_url,
})
thumbnails = []
imatges = media.get('imatges', {})
if imatges:
thumbnail_url = imatges.get('url')
if thumbnail_url:
thumbnails = [{
'url': thumbnail_url,
'width': int_or_none(imatges.get('amplada')),
'height': int_or_none(imatges.get('alcada')),
}]
return {
'id': media_id,
'title': title,
'description': clean_html(informacio.get('descripcio')),
'duration': duration,
'timestamp': timestamp,
'thumbnails': thumbnails,
'subtitles': subtitles,
'formats': formats,
}
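# _VALID_URL sanity check (illustrative only):
#
#     m = re.match(CCMAIE._VALID_URL,
#                  'http://www.ccma.cat/tv3/alacarta/x/x/video/5630208/')
#     m.groups()  # -> ('video', '5630208')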
| unlicense | 8,081,314,114,472,602,000 | 32.972477 | 119 | 0.498785 | false |
openstack/heat | heat/tests/convergence/scenarios/update_replace.py | 2 | 2098 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
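# Note on context: this file is a convergence test scenario that the Heat test
# framework executes with a prepared namespace; names such as engine, reality,
# test, verify, Template, RsrcDef, GetRes and GetAtt are injected by that
# framework rather than imported here.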
c_uuid = None
def store_c_uuid():
global c_uuid
c_uuid = next(iter(reality.resources_by_logical_name('C'))).uuid
def check_c_replaced():
test.assertNotEqual(c_uuid,
next(iter(reality.resources_by_logical_name('C'))).uuid)
test.assertIsNotNone(c_uuid)
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
engine.call(store_c_uuid)
example_template_updated = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template_updated)
engine.noop(11)
engine.call(verify, example_template_updated)
example_template_long = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
'F': RsrcDef({}, ['D', 'E']),
})
engine.update_stack('foo', example_template_long)
engine.noop(12)
engine.call(verify, example_template_long)
engine.call(check_c_replaced)
engine.delete_stack('foo')
engine.noop(6)
engine.call(verify, Template({}))
| apache-2.0 | 1,620,001,236,693,267,000 | 31.276923 | 80 | 0.607245 | false |
tal-nino/shinken | shinken/objects/brokerlink.py | 13 | 1701 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
from shinken.objects.satellitelink import SatelliteLink, SatelliteLinks
from shinken.property import IntegerProp, StringProp
class BrokerLink(SatelliteLink):
"""TODO: Add some comment about this class for the doc"""
id = 0
my_type = 'broker'
properties = SatelliteLink.properties.copy()
properties.update({
'broker_name': StringProp(fill_brok=['full_status'], to_send=True),
'port': IntegerProp(default=7772, fill_brok=['full_status']),
})
def get_name(self):
return self.broker_name
def register_to_my_realm(self):
self.realm.brokers.append(self)
class BrokerLinks(SatelliteLinks):
"""TODO: Add some comment about this class for the doc"""
name_property = "broker_name"
inner_class = BrokerLink
| agpl-3.0 | 1,958,381,642,869,193,000 | 33.714286 | 77 | 0.716049 | false |
commial/miasm | miasm/arch/mep/arch.py | 3 | 66072 | # Toshiba MeP-c4 - miasm architecture definition
# Guillaume Valadon <[email protected]>
from builtins import range
from miasm.core.cpu import *
from miasm.core.utils import Disasm_Exception
from miasm.expression.expression import ExprId, ExprInt, ExprLoc, \
ExprMem, ExprOp, is_expr
from miasm.core.asm_ast import AstId, AstMem
from miasm.arch.mep.regs import *
import miasm.arch.mep.regs as mep_regs_module # will be used to set mn_mep.regs
# Note: pyparsing is used to alter the way special operands are parsed
from pyparsing import Literal, Group, Word, hexnums
# These definitions will help parsing dereferencing instructions (i.e. that uses
# parenthesis) with pyparsing
LPARENTHESIS = Literal("(")
RPARENTHESIS = Literal(")")
PLUSSIGN = Literal("+")
HEX_INTEGER = str_int_pos | str_int_neg
def ExprInt2SignedString(expr, pos_fmt="%d", neg_fmt="%d", size=None, offset=0):
"""Return the signed string corresponding to an ExprInt
Note: this function is only useful to mimic objdump output"""
# Apply a mask to the integer
if size is None:
mask_length = expr.size
else:
mask_length = size
mask = (1 << mask_length) - 1
value = int(expr) & mask
# Return a signed integer if necessary
    if (value >> (mask_length - 1)) == 1:
value = offset - ((value ^ mask) + 1)
if value < 0:
return "-" + neg_fmt % -value
else:
value += offset
return pos_fmt % value
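# Hand-checked examples of ExprInt2SignedString (illustrative):
#
#     ExprInt2SignedString(ExprInt(0xFFFF, 16))         # -> "-1"
#     ExprInt2SignedString(ExprInt(0x7FFF, 16))         # -> "32767"
#     ExprInt2SignedString(ExprInt(0xFF, 8), offset=4)  # -> "3"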
class instruction_mep(instruction):
"""Generic MeP-c4 instruction
Notes:
- this object is used to build internal miasm instructions based
on mnemonics
- it must be implemented !
"""
# Default delay slot
# Note:
# - mandatory for the miasm Machine
delayslot = 0
@staticmethod
def arg2str(expr, pos=None, loc_db=None):
"""Convert mnemonics arguments into readable strings according to the
MeP-c4 architecture manual and their internal types
Notes:
- it must be implemented ! However, a simple 'return str(expr)'
could do the trick.
- it is used to mimic objdump output
Args:
expr: argument as a miasm expression
pos: position index in the arguments list
"""
if isinstance(expr, ExprId) or isinstance(expr, ExprInt):
return str(expr)
elif isinstance(expr, ExprLoc):
if loc_db is not None:
return loc_db.pretty_str(expr.loc_key)
else:
return str(expr)
elif isinstance(expr, ExprMem) and (isinstance(expr.ptr, ExprId) or isinstance(expr.ptr, ExprInt)):
return "(%s)" % expr.ptr
elif isinstance(expr, ExprMem) and isinstance(expr.ptr, ExprOp):
return "0x%X(%s)" % (int(expr.ptr.args[1]), expr.ptr.args[0])
# Raise an exception if the expression type was not processed
message = "instruction_mep.arg2str(): don't know what \
to do with a '%s' instance." % type(expr)
raise Disasm_Exception(message)
def __str__(self):
"""Return the mnemonic as a string.
Note:
- it is not mandatory as the instruction class already implement
it. It used to get rid of the padding between the opcode and the
arguments.
- most of this code is copied from miasm/core/cpu.py
"""
o = "%s" % self.name
if self.name == "SSARB":
# The first operand is displayed in decimal, not in hex
o += " %d" % int(self.args[0])
o += self.arg2str(self.args[1])
elif self.name in ["MOV", "ADD"] and isinstance(self.args[1], ExprInt):
# The second operand is displayed in decimal, not in hex
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1])
elif "CPI" in self.name:
# The second operand ends with the '+' sign
o += " " + self.arg2str(self.args[0])
deref_reg_str = self.arg2str(self.args[1])
o += ", %s+)" % deref_reg_str[:-1] # GV: looks ugly
elif self.name[0] in ["S", "L"] and self.name[-3:] in ["CPA", "PM0", "PM1"]:
# The second operand ends with the '+' sign
o += " " + self.arg2str(self.args[0])
deref_reg_str = self.arg2str(self.args[1])
o += ", %s+)" % deref_reg_str[:-1] # GV: looks ugly
# The third operand is displayed in decimal, not in hex
o += ", %s" % ExprInt2SignedString(self.args[2])
elif len(self.args) == 2 and self.name in ["SB", "SH", "LBU", "LB", "LH", "LW"] and \
isinstance(self.args[1], ExprMem) and isinstance(self.args[1].ptr, ExprOp): # Major Opcodes #12
# The second operand is an offset to a register
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1].ptr.args[1], "0x%X")
o += "(%s)" % self.arg2str(self.args[1].ptr.args[0])
elif len(self.args) == 2 and self.name in ["SWCP", "LWCP", "SMCP", "LMCP"] \
and isinstance(self.args[1], ExprMem) and isinstance(self.args[1].ptr, ExprOp): # Major Opcodes #12
# The second operand is an offset to a register
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1].ptr.args[1])
o += "(%s)" % self.arg2str(self.args[1].ptr.args[0])
elif self.name == "SLL" and isinstance(self.args[1], ExprInt): # Major Opcodes #6
# The second operand is displayed in hex, not in decimal
o += " " + self.arg2str(self.args[0])
o += ", 0x%X" % int(self.args[1])
elif self.name in ["ADD3", "SLT3"] and isinstance(self.args[2], ExprInt):
o += " %s" % self.arg2str(self.args[0])
o += ", %s" % self.arg2str(self.args[1])
# The third operand is displayed in decimal, not in hex
o += ", %s" % ExprInt2SignedString(self.args[2], pos_fmt="0x%X")
elif self.name == "(RI)":
return o
else:
args = []
if self.args:
o += " "
for i, arg in enumerate(self.args):
if not is_expr(arg):
raise ValueError('zarb arg type')
x = self.arg2str(arg, pos=i)
args.append(x)
o += self.gen_args(args)
return o
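    # Rendering example for the ExprInt branch above (illustrative): a "MOV"
    # whose second argument is ExprInt(0xFFFFFFFE, 32) is printed as
    # "MOV R1, -2", mimicking objdump's signed decimal output.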
def breakflow(self):
"""Instructions that stop a basic block."""
if self.name in ["BRA", "BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
if self.name in ["JMP", "JSR", "RET"]:
return True
if self.name in ["RETI", "HALT", "SLEEP"]:
return True
return False
def splitflow(self):
"""Instructions that splits a basic block, i.e. the CPU can go somewhere else."""
if self.name in ["BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
return False
def dstflow(self):
"""Instructions that explicitly provide the destination."""
if self.name in ["BRA", "BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
if self.name in ["JMP"]:
return True
return False
def dstflow2label(self, loc_db):
"""Set the label for the current destination.
Note: it is used at disassembly"""
if self.name == "JMP" and isinstance(self.args[0], ExprId):
# 'JMP RM' does not provide the destination
return
# Compute the correct address
num = self.get_dst_num()
addr = int(self.args[num])
if not self.name == "JMP":
addr += self.offset
# Get a new label at the address
label = loc_db.get_or_create_offset_location(addr)
# Assign the label to the correct instruction argument
self.args[num] = ExprLoc(label, self.args[num].size)
def get_dst_num(self):
"""Get the index of the argument that points to the instruction destination."""
if self.name[-1] == "Z":
num = 1
elif self.name in ["BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE"]:
num = 2
else:
num = 0
return num
def getdstflow(self, loc_db):
"""Get the argument that points to the instruction destination."""
num = self.get_dst_num()
return [self.args[num]]
def is_subcall(self):
"""Instructions used to call sub functions."""
return self.name in ["JSR", "BSR"]
def fixDstOffset(self):
"""Fix/correct the instruction immediate according to the current offset
Note: - it is used at assembly
- code inspired by miasm/arch/mips32/arch.py"""
if self.name == "JMP" and isinstance(self.args[0], ExprInt):
# 'JMP IMMEDIATE' does not need to be fixed
return
# Get the argument that needs to be fixed
if not len(self.args):
return
num = self.get_dst_num()
expr = self.args[num]
# Check that the argument can be fixed
if self.offset is None:
raise ValueError("Symbol not resolved %s" % self.l)
if not isinstance(expr, ExprInt):
return
# Adjust the immediate according to the current instruction offset
off = expr.arg - self.offset
if int(off % 2):
raise ValueError("Strange offset! %r" % off)
self.args[num] = ExprInt(off, 32)
class mep_additional_info(object):
"""Additional MeP instructions information
"""
def __init__(self):
self.except_on_instr = False
class mn_mep(cls_mn):
"""Toshiba MeP-c4 disassembler & assembler
"""
# Define variables that stores information used to disassemble & assemble
# Notes: - these variables are mandatory
# - they could be moved to the cls_mn class
num = 0 # holds the number of mnemonics
    all_mn = list() # list of mnemonics, converted to metamn objects
    all_mn_mode = defaultdict(list) # mnemonics, converted to metamn objects
# Note:
# - the key is the mode # GV: what is it ?
# - the data is a list of mnemonics
    all_mn_name = defaultdict(list) # mnemonics strings
# Note:
# - the key is the mnemonic string
# - the data is the corresponding
# metamn object
all_mn_inst = defaultdict(list) # mnemonics objects
# Note:
# - the key is the mnemonic Python class
# - the data is an instantiated object
bintree = dict() # Variable storing internal values used to guess a
# mnemonic during disassembly
# Defines the instruction set that will be used
instruction = instruction_mep
# Python module that stores registers information
regs = mep_regs_module
# Default delay slot
# Note:
# - mandatory for the miasm Machine
delayslot = 0
# Architecture name
name = "mep"
# PC name depending on architecture attributes (here, l or b)
pc = {'l': PC, 'b': PC}
def additional_info(self):
"""Define instruction side effects # GV: not fully understood yet
When used, it must return an object that implements specific
variables, such as except_on_instr.
Notes:
- it must be implemented !
- it could be moved to the cls_mn class
"""
return mep_additional_info()
@classmethod
def gen_modes(cls, subcls, name, bases, dct, fields):
"""Ease populating internal variables used to disassemble & assemble, such
as self.all_mn_mode, self.all_mn_name and self.all_mn_inst
Notes:
- it must be implemented !
- it could be moved to the cls_mn class. All miasm architectures
use the same code
Args:
cls: ?
            subcls:
name: mnemonic name
bases: ?
dct: ?
fields: ?
Returns:
a list of ?
"""
dct["mode"] = None
return [(subcls, name, bases, dct, fields)]
@classmethod
def getmn(cls, name):
"""Get the mnemonic name
Notes:
- it must be implemented !
- it could be moved to the cls_mn class. Most miasm architectures
use the same code
Args:
cls: the mnemonic class
name: the mnemonic string
"""
return name.upper()
@classmethod
def getpc(cls, attrib=None):
""""Return the ExprId that represents the Program Counter.
Notes:
- mandatory for the symbolic execution
- PC is defined in regs.py
Args:
attrib: architecture dependent attributes (here, l or b)
"""
return PC
@classmethod
def getsp(cls, attrib=None):
""""Return the ExprId that represents the Stack Pointer.
Notes:
- mandatory for the symbolic execution
- SP is defined in regs.py
Args:
attrib: architecture dependent attributes (here, l or b)
"""
return SP
@classmethod
def getbits(cls, bitstream, attrib, start, n):
"""Return an integer of n bits at the 'start' offset
Note: code from miasm/arch/mips32/arch.py
"""
# Return zero if zero bits are requested
if not n:
return 0
o = 0 # the returned value
while n:
# Get a byte, the offset is adjusted according to the endianness
offset = start // 8 # the offset in bytes
n_offset = cls.endian_offset(attrib, offset) # the adjusted offset
c = cls.getbytes(bitstream, n_offset, 1)
if not c:
raise IOError
# Extract the bits value
c = ord(c)
r = 8 - start % 8
c &= (1 << r) - 1
l = min(r, n)
c >>= (r - l)
o <<= l
o |= c
n -= l
start += l
return o
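    # Worked example for getbits (big endian, illustrative): over the bytes
    # 0x12 0x34, asking for n=8 bits at start=4 first takes the low nibble of
    # 0x12, then the high nibble of 0x34, yielding 0x23.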
@classmethod
def endian_offset(cls, attrib, offset):
"""Adjust the byte offset according to the endianness"""
if attrib == "l": # Little Endian
if offset % 2:
return offset - 1
else:
return offset + 1
elif attrib == "b": # Big Endian
return offset
else:
raise NotImplementedError("Bad MeP endianness")
def value(self, mode):
"""Adjust the assembled instruction based on the endianness
Note: code inspired by miasm/arch/mips32/arch.py
"""
# Get the candidated
candidates = super(mn_mep, self).value(mode)
if mode == "l":
# Invert bytes per 16-bits
for i in range(len(candidates)):
tmp = candidates[i][1] + candidates[i][0]
if len(candidates[i]) == 4:
tmp += candidates[i][3] + candidates[i][2]
candidates[i] = tmp
return candidates
elif mode == "b":
return candidates
else:
raise NotImplementedError("Bad MeP endianness (%s)" % mode)
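    # Byte-swapping example for value() (illustrative): in little endian mode
    # a 4-byte candidate 12 34 56 78 is reordered per 16-bit word to
    # 34 12 78 56 before being emitted.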
def addop(name, fields, args=None, alias=False):
"""Dynamically create the "name" object
Notes:
- it could be moved to a generic function such as:
addop(name, fields, cls_mn, args=None, alias=False).
- most architectures use the same code
Args:
name: the mnemonic name
fields: used to fill the object.__dict__'fields' attribute # GV: not understood yet
args: used to fill the object.__dict__'fields' attribute # GV: not understood yet
alias: used to fill the object.__dict__'fields' attribute # GV: not understood yet
"""
namespace = {"fields": fields, "alias": alias}
if args is not None:
namespace["args"] = args
# Dynamically create the "name" object
type(name, (mn_mep,), namespace)
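# For illustration (hypothetical encoding, not a real MeP opcode): a call such
# as
#
#     addop("nop2", [bs("0000000000000000")])
#
# would synthesize a mn_mep subclass named "nop2" whose 'fields' attribute
# drives both the disassembler matching and the assembler emission.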
# Define specific operand parsers & converters
def deref2expr(s, l, parse_results):
"""Convert a parsed dereferenced register to an ExprMem"""
# Only use the first results
parse_results = parse_results[0]
if type(parse_results[0]) == AstInt and isinstance(parse_results[2], AstId):
return AstMem(parse_results[2] + parse_results[0], 32) # 1 == "(" and 3 == ")"
elif type(parse_results[0]) == int and isinstance(parse_results[2], AstId):
return AstMem(parse_results[2] + AstOp('-', AstInt(-parse_results[0])), 32) # 1 == "(" and 3 == ")"
else:
return AstMem(parse_results[1], 32) # 0 == "(" and 2 == ")"
deref_reg_parser = Group(LPARENTHESIS + gpr_infos.parser + RPARENTHESIS).setParseAction(deref2expr)
deref_inc_reg_parser = Group(LPARENTHESIS + gpr_infos.parser + PLUSSIGN + RPARENTHESIS).setParseAction(deref2expr)
abs24_deref_parser = Group(LPARENTHESIS + HEX_INTEGER + RPARENTHESIS).setParseAction(deref2expr)
offset_deref_reg_parser = Group(HEX_INTEGER + LPARENTHESIS + gpr_infos.parser + RPARENTHESIS).setParseAction(deref2expr)
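# Concretely (illustrative inputs), these parsers accept operands such as
# "(R3)" for deref_reg_parser, "(R3+)" for deref_inc_reg_parser, "(0x1234)"
# for abs24_deref_parser and "0x8(R4)" for offset_deref_reg_parser, each being
# turned into an AstMem node by deref2expr().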
# Define registers decoders and encoders
class mep_arg(m_arg):
def asm_ast_to_expr(self, arg, loc_db):
"""Convert AST to expressions
Note: - code inspired by miasm/arch/mips32/arch.py"""
if isinstance(arg, AstId):
if isinstance(arg.name, ExprId):
return arg.name
if isinstance(arg.name, str) and arg.name in gpr_names:
return None # GV: why?
loc_key = loc_db.get_or_create_name_location(arg.name.encode())
return ExprLoc(loc_key, 32)
elif isinstance(arg, AstMem):
addr = self.asm_ast_to_expr(arg.ptr, loc_db)
if addr is None:
return None
return ExprMem(addr, 32)
elif isinstance(arg, AstInt):
return ExprInt(arg.value, 32)
elif isinstance(arg, AstOp):
args = [self.asm_ast_to_expr(tmp, loc_db) for tmp in arg.args]
if None in args:
return None
return ExprOp(arg.op, *args)
# Raise an exception if the argument was not processed
message = "mep_arg.asm_ast_to_expr(): don't know what \
to do with a '%s' instance." % type(arg)
raise Exception(message)
class mep_reg(reg_noarg, mep_arg):
"""Generic Toshiba MeP-c4 register
Note:
- the register size will be set using bs()
"""
reg_info = gpr_infos # the list of MeP-c4 registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
class mep_deref_reg(mep_arg):
"""Generic Toshiba MeP-c4 dereferenced register
Note:
- the arg2str() method could be defined to change the output string
"""
parser = deref_reg_parser
def decode(self, v):
"""Transform the decoded value to a ExprMem(ExprId()) expression"""
r = gpr_infos.expr[v] # get the ExprId, i.e. the register expression
self.expr = ExprMem(r, 32)
return True
def encode(self):
"""Ensure that we have a ExprMem(ExprId()) expression, and return the
register value."""
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprId):
return False
# Get the ExprId index, i.e. its value
self.value = gpr_exprs.index(self.expr.ptr)
return True
class mep_reg_sp(mep_reg):
"""Dummy Toshiba MeP-c4 register that represents SP. It is used in
instructions that implicitly use SP, such as ADD3.
"""
implicit_reg = SP
def decode(self, v):
"""Always return 'implicit_reg."""
self.expr = self.implicit_reg
return True
def encode(self):
"""Do nothing"""
return True
class mep_reg_tp(mep_reg_sp):
"""Dummy Toshiba MeP-c4 register that represents TP.
"""
implicit_reg = TP
class mep_deref_reg_offset(mep_arg):
"""Toshiba MeP-c4 dereferenced register that represents SP, plus an
offset.
"""
parser = offset_deref_reg_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded
register id.
"""
# Apply the immediate mask
se = sign_ext(v & 0xFFFF, 16, 32) # GV: might not belong here
int_id = ExprInt(se, 32)
# Get the register expression
reg_id = gpr_infos.expr[self.parent.reg04_deref.value]
# Build the internal expression
self.expr = ExprMem(reg_id + int_id, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in reg04_deref.
"""
# Verify the expression
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprOp):
return False
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1]) & 0xFFFF
# Encode the values
self.parent.reg04_deref.value = gpr_exprs.index(self.expr.ptr.args[0])
self.value = v & 0xFFFF
return True
class mep_deref_sp_offset(mep_deref_reg):
"""Dummy Toshiba MeP-c4 dereferenced register that represents SP, plus an
offset.
Note: it is as generic as possible to ease its use in different instructions
"""
implicit_reg = SP
parser = offset_deref_reg_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded
immediate.
"""
immediate = None
if getattr(self.parent, "imm7_align4", False):
# Apply the immediate mask
v = self.parent.imm7_align4.value & 0x1F
# Shift value such as:
# imm7=iii_ii||00
immediate = v << 2
elif getattr(self.parent, "imm7", False):
# Apply the immediate mask
immediate = self.parent.imm7.value & 0x7F
elif getattr(self.parent, "disp7_align2", False):
# Apply the immediate mask
disp7_align2 = self.parent.disp7_align2.value & 0x3F
# Shift value such as:
# disp7 = ddd_ddd||0
immediate = disp7_align2 << 1
if immediate is not None:
self.expr = ExprMem(self.implicit_reg + ExprInt(immediate, 32), 32)
return True
else:
return False
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in a parent immediate.
"""
# Verify the expression
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprOp):
return False
if self.expr.ptr.args[0] != self.implicit_reg:
return False
if getattr(self.parent, "imm7_align4", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
if v > 0x80:
return False
# Encode the value
self.parent.imm7_align4.value = v >> 2
return True
elif getattr(self.parent, "imm7", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
if v > 0x80:
return False
# Encode the value
self.parent.imm7.value = v
return True
elif getattr(self.parent, "disp7_align2", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
if v > 0x80:
return False
# Encode the value
self.parent.disp7_align2.value = v >> 1
return True
return False
class mep_deref_tp_offset(mep_deref_sp_offset):
"""Dummy Toshiba MeP-c4 dereferenced register that represents TP, plus an
offset.
"""
implicit_reg = TP
class mep_copro_reg(reg_noarg, mep_arg):
"""Generic Toshiba MeP-c4 coprocessor register
"""
reg_info = copro_gpr_infos # the list of MeP-c4 coprocessor registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
class mep_copro_reg_split(mep_copro_reg):
"""Generic Toshiba MeP-c4 coprocessor register encode into different fields
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_noarg.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# CRn=NNnnnn
crn = (v << 4) + (self.parent.imm4.value & 0xF)
# Build the internal expression
self.expr = ExprId("C%d" % crn, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_noarg.
"""
if not isinstance(self.expr, ExprId):
return False
# Get the register and check the upper bound
reg_name = self.expr.name
if reg_name[0] != "C":
return False
reg_value = copro_gpr_names.index(reg_name)
if reg_value > 0x3f:
return False
# Encode the value into two parts
self.parent.imm4.value = (reg_value & 0xF)
self.value = (reg_value >> 4) & 0x3
return True
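    # Worked example (illustrative): with the two high bits decoded here equal
    # to 0b10 and imm4 equal to 0x5, CRn becomes (2 << 4) + 5 = 37, i.e. the
    # coprocessor register C37; encode() performs the reverse split.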
class mep_deref_inc_reg(mep_deref_reg):
"""Generic Toshiba MeP-c4 coprocess dereferenced & incremented register
"""
parser = deref_inc_reg_parser
# Immediate decoders and encoders
class mep_int32_noarg(int32_noarg):
"""Generic Toshiba MeP-c4 signed immediate
Note: encode() is copied from int32_noarg.encode() and modified to allow
small (< 32 bits) signed immediate to be manipulated.
"""
def encode(self):
if not isinstance(self.expr, ExprInt):
return False
v = int(self.expr)
# Note: the following lines were commented on purpose
#if sign_ext(v & self.lmask, self.l, self.intsize) != v:
# return False
v = self.encodeval(v & self.lmask)
self.value = v & self.lmask
return True
class mep_imm(imm_noarg, mep_arg):
"""Generic Toshiba MeP-c4 immediate
Note:
- the immediate size will be set using bs()
"""
parser = base_expr
class mep_imm6(mep_int32_noarg):
"""Toshiba MeP-c4 signed 6 bits immediate."""
parser = base_expr
intsize = 6
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_imm8(mep_int32_noarg):
"""Toshiba MeP-c4 signed 8 bits immediate."""
parser = base_expr
intsize = 8
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_imm16(mep_int32_noarg):
"""Toshiba MeP-c4 16 bits immediate."""
parser = base_expr
intsize = 16
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(x, 32)
class mep_imm16_signed(mep_int32_noarg):
"""Toshiba MeP-c4 signed 16 bits immediate."""
parser = base_expr
intsize = 16
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_target24(mep_imm):
"""Toshiba MeP-c4 target24 immediate, as used in JMP
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm7.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# target24=tttt_tttt_tttt_tttt||TTT_TTTT||0
target24 = (v << 8) + ((self.parent.imm7.value & 0x7F) << 1)
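        # Worked example (illustrative values): v = 0x1234 with imm7 = 0x7F
        # gives target24 = (0x1234 << 8) + (0x7F << 1) = 0x1234FE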
# Build the internal expression
self.expr = ExprInt(target24, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm7.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and apply a mask
v = int(self.expr) & 0x00FFFFFF
# Encode the value into two parts
self.parent.imm7.value = (v & 0xFF) >> 1
self.value = v >> 8
return True
class mep_target24_signed(mep_target24):
"""Toshiba MeP-c4 target24 signed immediate, as used in BSR
"""
def decode(self, v):
"""Perform sign extension
"""
mep_target24.decode(self, v)
v = int(self.expr)
self.expr = ExprInt(sign_ext(v, 24, 32), 32)
return True
class mep_code20(mep_imm):
"""Toshiba MeP-c4 code20 immediate, as used in DSP1
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_noarg.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# code20=mmmm_cccc_cccc_cccc_cccc
code20 = v + ((self.parent.imm4.value & 0xFF) << 16)
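        # Worked example (illustrative values): v = 0xBEEF with imm4 = 0xA
        # gives code20 = 0xBEEF + (0xA << 16) = 0xABEEF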
# Build the internal expression
self.expr = ExprInt(code20, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_noarg.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr.arg)
if v > 0xffffff:
return False
# Encode the value into two parts
        self.parent.imm4.value = ((v >> 16) & 0xFF)
self.value = v
return True
class mep_code24(mep_imm):
"""Toshiba MeP-c4 code24 immediate, as used in CP
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm8_CCCC_CCCC.
"""
# Shift values such as:
# code24=CCCC_CCCC||cccc_cccc_cccc_cccc
code24 = v + ((self.parent.imm8_CCCC_CCCC.value & 0xFF) << 16)
# Build the internal expression
self.expr = ExprInt(code24, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm8_CCCC_CCCC.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr.arg)
if v > 0xFFFFFF:
return False
# Encode the value into two parts
self.parent.imm8_CCCC_CCCC.value = ((v >> 16) & 0xFF)
self.value = v & 0xFFFF
return True
class mep_imm7_align4(mep_imm):
"""Toshiba MeP-c4 imm7.align4 immediate, as used in Major #4 opcodes
"""
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift value such as:
# imm7=iii_ii||00
imm7_align4 = v << 2
# Build the internal expression
self.expr = ExprInt(imm7_align4, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr)
if v > 0x80:
return False
# Encode the value
self.value = v >> 2
return True
class mep_imm5_Iiiii(mep_imm):
"""Toshiba MeP-c4 imm5 immediate, as used in STC & LDC. It encodes a
control/special register.
"""
reg_info = csr_infos # the list of MeP-c4 control/special registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_iiii
"""
# Apply the immediate mask
I = v & self.lmask
# Shift values such as:
# imm5=I||iiii
imm5 = (I << 4) + (self.parent.imm4_iiii.value & 0xF)
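        # Worked example (illustrative values): I = 1 and imm4_iiii = 0b0010
        # give imm5 = (1 << 4) + 2 = 18, i.e. the register csr_names[18]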
# Build the internal register expression
self.expr = ExprId(csr_names[imm5], 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_iiii.
"""
if not isinstance(self.expr, ExprId):
return False
# Get the register number and check the upper bound
v = csr_names.index(self.expr.name)
if v > 0x1F:
return False
# Encode the value into two parts
self.parent.imm4_iiii.value = v & 0xF # iiii
self.value = (v >> 4) & 0b1 # I
return True
class mep_disp7_align2(mep_imm):
"""Toshiba MeP-c4 disp7.align2 immediate, as used in Major #8 opcodes
"""
upper_bound = 0x7F
bits_shift = 1
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift value such as:
# disp7 = ddd_ddd||0
disp7_align2 = (v << self.bits_shift)
# Sign extension
disp7_align2 = sign_ext(disp7_align2, self.l + self.bits_shift, 32)
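        # Worked example (illustrative values): with l = 6 and v = 0b111111,
        # disp7_align2 = 0b1111110 = 126, sign-extended over 7 bits to -2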
# Build the internal expression
self.expr = ExprInt(disp7_align2, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer
v = int(self.expr) & self.upper_bound
# Encode the value
        self.value = (v & self.upper_bound) >> self.bits_shift
return True
class mep_disp8_align2(mep_disp7_align2):
upper_bound = 0xFF
class mep_disp8_align4(mep_disp7_align2):
upper_bound = 0xFF
bits_shift = 2
class mep_imm8_align8(mep_disp7_align2):
upper_bound = 0xFF
bits_shift = 3
class mep_disp12_align2(mep_disp7_align2):
upper_bound = 0xFFF
class mep_disp12_align2_signed(mep_disp12_align2):
def decode(self, v):
"""Perform sign extension.
"""
mep_disp12_align2.decode(self, v)
v = int(self.expr)
self.expr = ExprInt(sign_ext(v, 12, 32), 32)
return True
class mep_disp17(mep_disp7_align2):
upper_bound = 0x1FFFF
class mep_imm24(mep_imm):
"""Toshiba MeP-c4 imm24 immediate, as used in MOVU
"""
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# imm24=iiii_iiii_iiii_iiii||IIII_IIIII
imm24 = ((v & 0xFFFF) << 8) + ((v & 0xFF0000) >> 16)
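        # Worked example (illustrative value): v = 0x123456 gives
        # imm24 = (0x3456 << 8) + 0x12 = 0x345612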
# Build the internal expression
self.expr = ExprInt(imm24, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr)
if v > 0xFFFFFF:
return False
# Encode the value
self.value = ((v & 0xFFFF00) >> 8) + ((v & 0xFF) << 16)
return True
class mep_abs24(mep_imm):
"""Toshiba MeP-c4 abs24 immediate
"""
parser = abs24_deref_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded imm6.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# abs24=dddd_dddd_dddd_dddd||DDDD_DD||00
abs24 = (v << 8) + ((self.parent.imm6.value & 0x3F) << 2)
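        # Worked example (illustrative values): v = 0x1234 with imm6 = 0x3F
        # gives abs24 = (0x1234 << 8) + (0x3F << 2) = 0x1234FC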
# Build the internal expression
self.expr = ExprMem(ExprInt(abs24, 32), 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm6.
"""
if not (isinstance(self.expr, ExprMem) and isinstance(self.expr.ptr, ExprInt)):
return False
# Get the integer and check the upper bound
v = int(self.expr.ptr)
if v > 0xffffff:
return False
# Encode the value into two parts
self.parent.imm6.value = (v & 0xFF) >> 2
self.value = v >> 8
return True
# Define MeP-c4 assembly operands
reg04 = bs(l=4, # length in bits
cls=(mep_reg, )) # class implementing decoding & encoding
reg04_l = bs(l=4, cls=(mep_reg, ))
reg04_m = bs(l=4, cls=(mep_reg, ))
reg04_n = bs(l=4, cls=(mep_reg, ))
reg00 = bs(l=0, cls=(mep_reg, ))
reg00_sp = bs(l=0, cls=(mep_reg_sp, ))
reg00_tp = bs(l=0, cls=(mep_reg_tp, ))
reg00_deref_sp = bs(l=0, cls=(mep_deref_sp_offset, ))
reg00_deref_tp = bs(l=0, cls=(mep_deref_tp_offset, ))
reg03 = bs(l=3, cls=(mep_reg, ))
reg04_deref = bs(l=4, cls=(mep_deref_reg,))
reg04_deref_noarg = bs(l=4, fname="reg04_deref")
reg04_inc_deref = bs(l=4, cls=(mep_deref_inc_reg,))
copro_reg04 = bs(l=4, cls=(mep_copro_reg,))
copro_reg05 = bs(l=1, cls=(mep_copro_reg_split,))
copro_reg06 = bs(l=2, cls=(mep_copro_reg_split,))
disp2 = bs(l=2, cls=(mep_imm, ))
imm2 = disp2
imm3 = bs(l=3, cls=(mep_imm, ))
imm4 = bs(l=4, cls=(mep_imm, ))
imm4_noarg = bs(l=4, fname="imm4")
imm4_iiii_noarg = bs(l=4, fname="imm4_iiii")
imm5 = bs(l=5, cls=(mep_imm, ))
imm5_Iiiii = bs(l=1, cls=(mep_imm5_Iiiii, )) # it is not an immediate, but a
# control/special register.
imm6 = bs(l=6, cls=(mep_imm6, mep_arg))
imm6_noarg = bs(l=6, fname="imm6")
imm7 = bs(l=7, cls=(mep_imm, ))
imm7_noarg = bs(l=7, fname="imm7") # Note:
# - will be decoded as a 7 bits immediate
# - fname is used to set the operand name
                                   # used in mep_target24 to merge operand
                                   # values. By default, the bs class fills
                                   # fname with a hex string computed from
# arguments passed to __init__
imm7_align4 = bs(l=5, cls=(mep_imm7_align4,))
imm7_align4_noarg = bs(l=5, fname="imm7_align4")
disp7_align2 = bs(l=6, cls=(mep_disp7_align2,))
disp7_align2_noarg = bs(l=6, fname="disp7_align2")
imm8 = bs(l=8, cls=(mep_imm8, mep_arg))
imm8_noarg = bs(l=8, fname="imm8_CCCC_CCCC")
disp8 = bs(l=7, cls=(mep_disp8_align2, ))
imm8_align2 = bs(l=7, cls=(mep_disp8_align2, ))
imm8_align4 = bs(l=6, cls=(mep_disp8_align4, ))
imm8_align8 = bs(l=5, cls=(mep_imm8_align8, ))
imm12 = bs(l=12, cls=(mep_imm, ))
disp12_signed = bs(l=11, cls=(mep_disp12_align2_signed, ))
imm16 = bs(l=16, cls=(mep_imm16, mep_arg))
imm16_signed = bs(l=16, cls=(mep_imm16_signed, mep_arg))
disp16_reg_deref = bs(l=16, cls=(mep_deref_reg_offset,))
disp17 = bs(l=16, cls=(mep_disp17, ))
imm18 = bs(l=19, cls=(mep_imm, ))
imm_code20 = bs(l=16, cls=(mep_code20, ))
imm24 = bs(l=24, cls=(mep_imm24, ))
imm_target24 = bs(l=16, cls=(mep_target24, ))
imm_target24_signed = bs(l=16, cls=(mep_target24_signed, ))
imm_code24 = bs(l=16, cls=(mep_code24, ))
abs24 = bs(l=16, cls=(mep_abs24, ))
# MeP-c4 mnemonics objects
### <Major Opcode #0>
# MOV Rn,Rm - 0000_nnnn_mmmm_0000
addop("MOV", [bs("0000"), reg04, reg04, bs("0000")])
# NEG Rn,Rm - 0000_nnnn_mmmm_0001
addop("NEG", [bs("0000"), reg04, reg04, bs("0001")])
# SLT3 R0,Rn,Rm - 0000_nnnn_mmmm_0010
addop("SLT3", [bs("0000"), reg00, reg04, reg04, bs("0010")])
# SLTU3 R0,Rn,Rm - 0000_nnnn_mmmm_0011
addop("SLTU3", [bs("0000"), reg00, reg04, reg04, bs("0011")])
# SUB Rn,Rm - 0000_nnnn_mmmm_0100
addop("SUB", [bs("0000"), reg04, reg04, bs("0100")])
# SBVCK3 R0,Rn,Rm - 0000_nnnn_mmmm_0101
addop("SBVCK3", [bs("0000"), reg00, reg04, reg04, bs("0101")])
# (RI) - 0000_xxxx_xxxx_0110
addop("(RI)", [bs("0000"), reg04, reg04, bs("0110")])
# ADVCK3 R0,Rn,Rm - 0000_nnnn_mmmm_0111
addop("ADVCK3", [bs("0000"), reg00, reg04, reg04, bs("0111")])
# SB Rn,(Rm) - 0000_nnnn_mmmm_1000
addop("SB", [bs("0000"), reg04, reg04_deref, bs("1000")])
# SH Rn,(Rm) - 0000_nnnn_mmmm_1001
addop("SH", [bs("0000"), reg04, reg04_deref, bs("1001")])
# SW Rn,(Rm) - 0000_nnnn_mmmm_1010
addop("SW", [bs("0000"), reg04, reg04_deref, bs("1010")])
# LBU Rn,(Rm) - 0000_nnnn_mmmm_1011
addop("LBU", [bs("0000"), reg04, reg04_deref, bs("1011")])
# LB Rn,(Rm) - 0000_nnnn_mmmm_1100
addop("LB", [bs("0000"), reg04, reg04_deref, bs("1100")])
# LH Rn,(Rm) - 0000_nnnn_mmmm_1101
addop("LH", [bs("0000"), reg04, reg04_deref, bs("1101")])
# LW Rn,(Rm) - 0000_nnnn_mmmm_1110
addop("LW", [bs("0000"), reg04, reg04_deref, bs("1110")])
# LHU Rn,(Rm) - 0000_nnnn_mmmm_1111
addop("LHU", [bs("0000"), reg04, reg04_deref, bs("1111")])
### <Major Opcode #1>
# OR Rn,Rm - 0001_nnnn_mmmm_0000
addop("OR", [bs("0001"), reg04, reg04, bs("0000")])
# AND Rn,Rm - 0001_nnnn_mmmm_0001
addop("AND", [bs("0001"), reg04, reg04, bs("0001")])
# XOR Rn,Rm - 0001_nnnn_mmmm_0010
addop("XOR", [bs("0001"), reg04, reg04, bs("0010")])
# NOR Rn,Rm - 0001_nnnn_mmmm_0011
addop("NOR", [bs("0001"), reg04, reg04, bs("0011")])
# MUL Rn,Rm - 0001_nnnn_mmmm_0100
addop("MUL", [bs("0001"), reg04, reg04, bs("0100")])
# MULU Rn,Rm - 0001_nnnn_mmmm_0101
addop("MULU", [bs("0001"), reg04, reg04, bs("0101")])
# MULR Rn,Rm - 0001_nnnn_mmmm_0110
addop("MULR", [bs("0001"), reg04, reg04, bs("0110")])
# MULRU Rn,Rm - 0001_nnnn_mmmm_0111
addop("MULRU", [bs("0001"), reg04, reg04, bs("0111")])
# DIV Rn,Rm - 0001_nnnn_mmmm_1000
addop("DIV", [bs("0001"), reg04, reg04, bs("1000")])
# DIVU Rn,Rm - 0001_nnnn_mmmm_1001
addop("DIVU", [bs("0001"), reg04, reg04, bs("1001")])
# (RI) - 0001_xxxx_xxxx_1010
addop("(RI)", [bs("0001"), reg04, reg04, bs("1010")])
# (RI) - 0001_xxxx_xxxx_1011
addop("(RI)", [bs("0001"), reg04, reg04, bs("1011")])
# SSARB disp2(Rm) - 0001_00dd_mmmm_1100
addop("SSARB", [bs("000100"), disp2, reg04_deref, bs("1100")])
# EXTB Rn - 0001_nnnn_0000_1101
addop("EXTB", [bs("0001"), reg04, bs("00001101")])
# EXTH Rn - 0001_nnnn_0010_1101
addop("EXTH", [bs("0001"), reg04, bs("00101101")])
# EXTUB Rn - 0001_nnnn_1000_1101
addop("EXTUB", [bs("0001"), reg04, bs("10001101")])
# EXTUH Rn - 0001_nnnn_1010_1101
addop("EXTUH", [bs("0001"), reg04, bs("10101101")])
# JMP Rm - 0001_0000_mmmm_1110
addop("JMP", [bs("00010000"), reg04, bs("1110")])
# JSR Rm - 0001_0000_mmmm_1111
addop("JSR", [bs("00010000"), reg04, bs("1111")])
# JSRV Rm - 0001_1000_mmmm_1111
addop("JSRV", [bs("00011000"), reg04, bs("1111")])
### <Major Opcode #2>
# BSETM (Rm),imm3 - 0010_0iii_mmmm_0000
addop("BSETM", [bs("00100"), imm3, reg04_deref, bs("0000")], [reg04_deref, imm3])
# BCLRM (Rn),imm3 - 0010_0iii_mmmm_0001
addop("BCLRM", [bs("00100"), imm3, reg04_deref, bs("0001")], [reg04_deref, imm3])
# BNOTM (Rm),imm3 - 0010_0iii_mmmm_0010
addop("BNOTM", [bs("00100"), imm3, reg04_deref, bs("0010")], [reg04_deref, imm3])
# BTSTM R0,(Rm),imm3 - 0010_0iii_mmmm_0011
addop("BTSTM", [bs("00100"), reg00, imm3, reg04_deref, bs("0011")], [reg00, reg04_deref, imm3])
# TAS Rn,(Rm) - 0010_nnnn_mmmm_0100
addop("TAS", [bs("0010"), reg04, reg04_deref, bs("0100")])
# (RI) - 0010_xxxx_xxxx_0101
addop("(RI)", [bs("0010"), reg04, reg04, bs("0101")])
# SL1AD3 R0,Rn,Rm - 0010_nnnn_mmmm_0110
addop("SL1AD3", [bs("0010"), reg00, reg04, reg04, bs("0110")])
# SL2AD3 R0,Rn,Rm - 0010_nnnn_mmmm_0111
addop("SL2AD3", [bs("0010"), reg00, reg04, reg04, bs("0111")])
# (RI) - 0010_xxxx_xxxx_1000
addop("(RI)", [bs("0010"), reg04, reg04, bs("1000")])
# (RI) - 0010_xxxx_xxxx_1001
addop("(RI)", [bs("0010"), reg04, reg04, bs("1001")])
# (RI) - 0010_xxxx_xxxx_1010
addop("(RI)", [bs("0010"), reg04, reg04, bs("1010")])
# (RI) - 0010_xxxx_xxxx_1011
addop("(RI)", [bs("0010"), reg04, reg04, bs("1011")])
# SRL Rn,Rm - 0010_nnnn_mmmm_1100
addop("SRL", [bs("0010"), reg04, reg04, bs("1100")])
# SRA Rn,Rm - 0010_nnnn_mmmm_1101
addop("SRA", [bs("0010"), reg04, reg04, bs("1101")])
# SLL Rn,Rm - 0010_nnnn_mmmm_1110
addop("SLL", [bs("0010"), reg04, reg04, bs("1110")])
# FSFT Rn,Rm - 0010_nnnn_mmmm_1111
addop("FSFT", [bs("0010"), reg04, reg04, bs("1111")])
### <Major Opcode #3>
# SWCPI CRn,(Rm+) - 0011_nnnn_mmmm_0000
addop("SWCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0000")])
# LWCPI CRn,(Rm+) - 0011_nnnn_mmmm_0001
addop("LWCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0001")])
# SMCPI CRn,(Rm+) - 0011_nnnn_mmmm_0010
addop("SMCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0010")])
# LMCPI CRn,(Rm+) - 0011_nnnn_mmmm_0011
addop("LMCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0011")])
# SWCP CRn,(Rm) - 0011_nnnn_mmmm_1000
addop("SWCP", [bs("0011"), copro_reg04, reg04_deref, bs("1000")])
# LWCP CRn,(Rm) - 0011_nnnn_mmmm_1001
addop("LWCP", [bs("0011"), copro_reg04, reg04_deref, bs("1001")])
# SMCP CRn,(Rm) - 0011_nnnn_mmmm_1010
addop("SMCP", [bs("0011"), copro_reg04, reg04_deref, bs("1010")])
# LMCP CRn,(Rm) - 0011_nnnn_mmmm_1011
addop("LMCP", [bs("0011"), copro_reg04, reg04_deref, bs("1011")])
### <Major Opcode #4>
# ADD3 Rn,SP,imm7.align4 - 0100_nnnn_0iii_ii00
addop("ADD3", [bs("0100"), reg04, reg00_sp, bs("0"), imm7_align4, bs("00")])
# SW Rn,disp7.align4(SP) - 0100_nnnn_0ddd_dd10
# Note: disp7.align4 is the same as imm7.align4
addop("SW", [bs("0100"), reg04, bs("0"), imm7_align4_noarg, reg00_deref_sp, bs("10")])
# LW Rn,disp7.align4(SP) - 0100_nnnn_0ddd_dd11
addop("LW", [bs("0100"), reg04, bs("0"), imm7_align4_noarg, reg00_deref_sp, bs("11")])
# SW Rn[0-7],disp7.align4(TP) - 0100_0nnn_1ddd_dd10
addop("SW", [bs("01000"), reg03, bs("1"), imm7_align4_noarg, reg00_deref_tp, bs("10")])
# LW Rn[0-7],disp7.align4(TP) - 0100_0nnn_1ddd_dd11
addop("LW", [bs("01000"), reg03, bs("1"), imm7_align4_noarg, reg00_deref_tp, bs("11")])
# LBU Rn[0-7],disp7(TP) - 0100_1nnn_1ddd_dddd
addop("LBU", [bs("01001"), reg03, bs("1"), imm7_noarg, reg00_deref_tp], [reg03, reg00_deref_tp])
### <Major Opcode #5>
# MOV Rn,imm8 - 0101_nnnn_iiii_iiii
addop("MOV", [bs("0101"), reg04, imm8])
### <Major Opcode #6>
# ADD Rn,imm6 - 0110_nnnn_iiii_ii00
addop("ADD", # mnemonic name
[bs("0110"), reg04, imm6, bs("00")]) # mnemonic description
# SLT3 R0,Rn,imm5 - 0110_nnnn_iiii_i001
addop("SLT3", [bs("0110"), reg00, reg04, imm5, bs("001")])
# SRL Rn,imm5 - 0110_nnnn_iiii_i010
addop("SRL", [bs("0110"), reg04, imm5, bs("010")])
# SRA Rn,imm5 - 0110_nnnn_iiii_i011
addop("SRA", [bs("0110"), reg04, imm5, bs("011")])
# SLTU3 R0,Rn,imm5 - 0110_nnnn_iiii_i101
addop("SLTU3", [bs("0110"), reg00, reg04, imm5, bs("101")])
# SLL Rn,imm5 - 0110_nnnn_iiii_i110
addop("SLL", [bs("0110"), reg04, imm5, bs("110")])
# SLL3 R0,Rn,imm5 - 0110_nnnn_iiii_i111
addop("SLL3", [bs("0110"), reg00, reg04, imm5, bs("111")])
### <Major Opcode #7>
# DI - 0111_0000_0000_0000
addop("DI", [bs("0111000000000000")])
# EI - 0111_0000_0001_0000
addop("EI", [bs("0111000000010000")])
# SYNCM - 0111_0000_0001_0001
addop("SYNCM", [bs("0111000000010001")])
# SYNCCP - 0111_0000_0010_0001
addop("SYNCCP", [bs("0111000000100001")])
# RET - 0111_0000_0000_0010
addop("RET", [bs("0111000000000010")])
# RETI - 0111_0000_0001_0010
addop("RETI", [bs("0111000000010010")])
# HALT - 0111_0000_0010_0010
addop("HALT", [bs("0111000000100010")])
# BREAK - 0111_0000_0011_0010
addop("BREAK", [bs("0111000000110010")])
# SLEEP - 0111_0000_0110_0010
addop("SLEEP", [bs("0111000001100010")])
# DRET - 0111_0000_0001_0011
addop("DRET", [bs("0111000000010011")])
# DBREAK - 0111_0000_0011_0011
addop("DBREAK", [bs("0111000000110011")])
# CACHE imm4,(Rm) - 0111_iiii_mmmm_0100
addop("CACHE", [bs("0111"), imm4, reg04_deref, bs("0100")])
# (RI) - 0111_xxxx_xxxx_0101
addop("(RI)", [bs("0111"), reg04, reg04, bs("0101")])
# SWI imm2 - 0111_0000_00ii_0110
addop("SWI", [bs("0111000000"), imm2, bs("0110")])
# (RI) - 0111_xxxx_xxxx_0111
addop("(RI)", [bs("0111"), reg04, reg04, bs("0111")])
# STC Rn,imm5 - 0111_nnnn_iiii_100I
addop("STC", [bs("0111"), reg04, imm4_iiii_noarg, bs("100"), imm5_Iiiii])
# LDC Rn,imm5 - 0111_nnnn_iiii_101I
addop("LDC", [bs("0111"), reg04, imm4_iiii_noarg, bs("101"), imm5_Iiiii])
# (RI) - 0111_xxxx_xxxx_1100
addop("(RI)", [bs("0111"), reg04, reg04, bs("1100")])
# (RI) - 0111_xxxx_xxxx_1101
addop("(RI)", [bs("0111"), reg04, reg04, bs("1101")])
# (RI) - 0111_xxxx_xxxx_1110
addop("(RI)", [bs("0111"), reg04, reg04, bs("1110")])
# (RI) - 0111_xxxx_xxxx_1111
addop("(RI)", [bs("0111"), reg04, reg04, bs("1111")])
### <Major Opcode #8>
# SB Rn[0-7],disp7(TP) - 1000_0nnn_0ddd_dddd
addop("SB", [bs("10000"), reg03, bs("0"), imm7_noarg, reg00_deref_tp])
# SH Rn[0-7],disp7.align2(TP) - 1000_0nnn_1ddd_ddd0
# (disp7.align2 = ddd_ddd||0)
addop("SH", [bs("10000"), reg03, bs("1"), disp7_align2_noarg, bs("0"), reg00_deref_tp])
# LB Rn[0-7],disp7(TP) - 1000_1nnn_0ddd_dddd
addop("LB", [bs("10001"), reg03, bs("0"), imm7_noarg, reg00_deref_tp])
# LH Rn[0-7],disp7.align2(TP) - 1000_1nnn_1ddd_ddd0
addop("LH", [bs("10001"), reg03, bs("1"), disp7_align2_noarg, bs("0"), reg00_deref_tp])
# LHU Rn[0-7],disp7.align2(TP) - 1000_1nnn_1ddd_ddd1
addop("LHU", [bs("10001"), reg03, bs("1"), disp7_align2_noarg, bs("1"), reg00_deref_tp])
### <Major Opcode #9>
# ADD3 Rl,Rn,Rm - 1001_nnnn_mmmm_llll
addop("ADD3", [bs("1001"), reg04_n, reg04_m, reg04_l], [reg04_l, reg04_n, reg04_m])
### <Major Opcode #10>
# BEQZ Rn,disp8.align2 - 1010_nnnn_dddd_ddd0
# (disp8=dddd_ddd||0)
addop("BEQZ", [bs("1010"), reg04, disp8, bs("0")])
# BNEZ Rn,disp8.align2 - 1010_nnnn_dddd_ddd1
addop("BNEZ", [bs("1010"), reg04, disp8, bs("1")])
### <Major Opcode #11>
# BRA disp12.align2 - 1011_dddd_dddd_ddd0
# (disp12=dddd_dddd_ddd||0)
addop("BRA", [bs("1011"), disp12_signed, bs("0")])
# BSR disp12.align2 - 1011_dddd_dddd_ddd1
addop("BSR", [bs("1011"), disp12_signed, bs("1")])
### <Major Opcode #12>
# ADD3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0000 iiii_iiii_iiii_iiii
addop("ADD3", [bs("1100"), reg04, reg04, bs("0000"), imm16_signed])
# MOV Rn,imm16 - 1100_nnnn_0000_0001 iiii_iiii_iiii_iiii
addop("MOV", [bs("1100"), reg04, bs("00000001"), imm16])
# MOVU Rn,imm16 - 1100_nnnn_0001_0001 iiii_iiii_iiii_iiii
addop("MOVU", [bs("1100"), reg04, bs("00010001"), imm16])
# MOVH Rn,imm16 - 1100_nnnn_0010_0001 iiii_iiii_iiii_iiii
addop("MOVH", [bs("1100"), reg04, bs("00100001"), imm16])
# SLT3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0010 iiii_iiii_iiii_iiii
addop("SLT3", [bs("1100"), reg04, reg04, bs("0010"), imm16_signed])
# SLTU3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0011 iiii_iiii_iiii_iiii
addop("SLTU3", [bs("1100"), reg04, reg04, bs("0011"), imm16])
# OR3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0100 iiii_iiii_iiii_iiii
addop("OR3", [bs("1100"), reg04, reg04, bs("0100"), imm16])
# AND3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0101 iiii_iiii_iiii_iiii
addop("AND3", [bs("1100"), reg04, reg04, bs("0101"), imm16])
# XOR3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0110 iiii_iiii_iiii_iiii
addop("XOR3", [bs("1100"), reg04, reg04, bs("0110"), imm16])
# (RI) - 1100_xxxx_xxxx_0111 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1100"), imm8, bs("0111"), imm16])
# SB Rn,disp16(Rm) - 1100_nnnn_mmmm_1000 dddd_dddd_dddd_dddd
addop("SB", [bs("1100"), reg04, reg04_deref_noarg, bs("1000"), disp16_reg_deref], [reg04, disp16_reg_deref])
# SH Rn,disp16(Rm) - 1100_nnnn_mmmm_1001 dddd_dddd_dddd_dddd
addop("SH", [bs("1100"), reg04, reg04_deref_noarg, bs("1001"), disp16_reg_deref], [reg04, disp16_reg_deref])
# SW Rn,disp16(Rm) - 1100_nnnn_mmmm_1010 dddd_dddd_dddd_dddd
addop("SW", [bs("1100"), reg04, reg04_deref_noarg, bs("1010"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LBU Rn,disp16(Rm) - 1100_nnnn_mmmm_1011 dddd_dddd_dddd_dddd
addop("LBU", [bs("1100"), reg04, reg04_deref_noarg, bs("1011"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LB Rn,disp16(Rm) - 1100_nnnn_mmmm_1100 dddd_dddd_dddd_dddd
addop("LB", [bs("1100"), reg04, reg04_deref_noarg, bs("1100"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LH Rn,disp16(Rm) - 1100_nnnn_mmmm_1101 dddd_dddd_dddd_dddd
addop("LH", [bs("1100"), reg04, reg04_deref_noarg, bs("1101"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LW Rn,disp16(Rm) - 1100_nnnn_mmmm_1110 dddd_dddd_dddd_dddd
addop("LW", [bs("1100"), reg04, reg04_deref_noarg, bs("1110"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LHU Rn,disp16(Rm) - 1100_nnnn_mmmm_1111 dddd_dddd_dddd_dddd
addop("LHU", [bs("1100"), reg04, reg04_deref_noarg, bs("1111"), disp16_reg_deref], [reg04, disp16_reg_deref])
### <Major Opcode #13>
# MOVU Rn[0-7],imm24 - 1101_0nnn_IIII_IIII iiii_iiii_iiii_iiii
addop("MOVU", [bs("11010"), reg03, imm24])
# BCPEQ cccc,disp17 - 1101_1000_cccc_0100 dddd_dddd_dddd_dddd
addop("BCPEQ", [bs("11011000"), imm4, bs("0100"), disp17])
# BCPNE cccc,disp17 - 1101_1000_cccc_0101 dddd_dddd_dddd_dddd
addop("BCPNE", [bs("11011000"), imm4, bs("0101"), disp17])
# BCPAT cccc,disp17 - 1101_1000_cccc_0110 dddd_dddd_dddd_dddd
addop("BCPAT", [bs("11011000"), imm4, bs("0110"), disp17])
# BCPAF cccc,disp17 - 1101_1000_cccc_0111 dddd_dddd_dddd_dddd
addop("BCPAF", [bs("11011000"), imm4, bs("0111"), disp17])
# JMP target24 - 1101_1TTT_TTTT_1000 tttt_tttt_tttt_tttt
addop("JMP", [bs("11011"), imm7_noarg, bs("1000"), imm_target24],
[imm_target24]) # the only interesting operand is imm_target24
# BSR disp24 - 1101_1DDD_DDDD_1001 dddd_dddd_dddd_dddd
addop("BSR", [bs("11011"), imm7_noarg, bs("1001"), imm_target24_signed], [imm_target24_signed])
# BSRV disp24 - 1101_1DDD_DDDD_1011 dddd_dddd_dddd_dddd
addop("BSRV", [bs("11011"), imm7_noarg, bs("1011"), imm_target24], [imm_target24])
### <Major Opcode #14>
# BEQI Rn,imm4,disp17 - 1110_nnnn_iiii_0000 dddd_dddd_dddd_dddd
addop("BEQI", [bs("1110"), reg04, imm4, bs("0000"), disp17])
# BEQ Rn,Rm,disp17 - 1110_nnnn_mmmm_0001 dddd_dddd_dddd_dddd
addop("BEQ", [bs("1110"), reg04, reg04, bs("0001"), disp17])
# BNEI Rn,imm4,disp17 - 1110_nnnn_iiii_0100 dddd_dddd_dddd_dddd
addop("BNEI", [bs("1110"), reg04, imm4, bs("0100"), disp17])
# BNE Rn,Rm,disp17 - 1110_nnnn_mmmm_0101 dddd_dddd_dddd_dddd
addop("BNE", [bs("1110"), reg04, reg04, bs("0101"), disp17])
# BGEI Rn,imm4,disp17 - 1110_nnnn_iiii_1000 dddd_dddd_dddd_dddd
addop("BGEI", [bs("1110"), reg04, imm4, bs("1000"), disp17])
# REPEAT Rn,disp17 - 1110_nnnn_0000_1001 dddd_dddd_dddd_dddd
addop("REPEAT", [bs("1110"), reg04, bs("00001001"), disp17])
# EREPEAT disp17 - 1110_0000_0001_1001 dddd_dddd_dddd_dddd
addop("EREPEAT", [bs("1110000000011001"), disp17])
# BLTI Rn,imm4,disp17 - 1110_nnnn_iiii_1100 dddd_dddd_dddd_dddd
addop("BLTI", [bs("1110"), reg04, imm4, bs("1100"), disp17])
# (RI) - 1110_xxxx_xxxx_1101 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1110"), imm8, bs("1101"), imm16])
# SW Rn,(abs24) - 1110_nnnn_DDDD_DD10 dddd_dddd_dddd_dddd
addop("SW", [bs("1110"), reg04, imm6_noarg, bs("10"), abs24])
# LW Rn,(abs24) - 1110_nnnn_DDDD_DD11 dddd_dddd_dddd_dddd
addop("LW", [bs("1110"), reg04, imm6_noarg, bs("11"), abs24])
### <Major Opcode #15>
# DSP Rn,Rm,code16 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
addop("DSP", [bs("1111"), reg04, reg04, bs("0000"), imm16])
# Note: DSP, DSP0 & DSP1 look exactly the same. This is ambiguous, and prevents
#       them from being correctly disassembled. DSP0 & DSP1 are arbitrarily
#       disabled.
# DSP0 code24 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
#addop("DSP0", [bs("1111"), imm8_noarg, bs("0000"), imm_code24], [imm_code24])
# DSP1 Rn,code20 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
#addop("DSP1", [bs("1111"), reg04, imm4_noarg, bs("0000"), imm_code20])
# LDZ Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0000
addop("LDZ", [bs("1111"), reg04, reg04, bs("00010000000000000000")])
# AVE Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0010
addop("AVE", [bs("1111"), reg04, reg04, bs("00010000000000000010")])
# ABS Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0011
addop("ABS", [bs("1111"), reg04, reg04, bs("00010000000000000011")])
# MIN Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0100
addop("MIN", [bs("1111"), reg04, reg04, bs("00010000000000000100")])
# MAX Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0101
addop("MAX", [bs("1111"), reg04, reg04, bs("00010000000000000101")])
# MINU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0110
addop("MINU", [bs("1111"), reg04, reg04, bs("00010000000000000110")])
# MAXU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0111
addop("MAXU", [bs("1111"), reg04, reg04, bs("00010000000000000111")])
# SADD Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1000
addop("SADD", [bs("1111"), reg04, reg04, bs("00010000000000001000")])
# SADDU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1001
addop("SADDU", [bs("1111"), reg04, reg04, bs("00010000000000001001")])
# SSUB Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1010
addop("SSUB", [bs("1111"), reg04, reg04, bs("00010000000000001010")])
# SSUBU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1011
addop("SSUBU", [bs("1111"), reg04, reg04, bs("00010000000000001011")])
# CLIP Rn,imm5 - 1111_nnnn_0000_0001 0001_0000_iiii_i000
addop("CLIP", [bs("1111"), reg04, bs("0000000100010000"), imm5, bs("000")])
# CLIPU Rn,imm5 - 1111_nnnn_0000_0001 0001_0000_iiii_i001
addop("CLIPU", [bs("1111"), reg04, bs("0000000100010000"), imm5, bs("001")])
# (RI) - 1111_xxxx_xxxx_0001 0010_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("00010010"), imm12])
# MADD Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0100
addop("MADD", [bs("1111"), reg04, reg04, bs("00010011000000000100")])
# MADDU Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0101
addop("MADDU", [bs("1111"), reg04, reg04, bs("00010011000000000101")])
# MADDR Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0110
addop("MADDR", [bs("1111"), reg04, reg04, bs("00010011000000000110")])
# MADDRU Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0111
addop("MADDRU", [bs("1111"), reg04, reg04, bs("00010011000000000111")])
# UCI Rn,Rm,code16 - 1111_nnnn_mmmm_0010 cccc_cccc_cccc_cccc
addop("UCI", [bs("1111"), reg04, reg04, bs("0010"), imm16])
# (RI) - 1111_xxxx_xxxx_0011 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("0011"), imm16])
# STCB Rn,abs16 - 1111_nnnn_0000_0100 aaaa_aaaa_aaaa_aaaa
addop("STCB", [bs("1111"), reg04, bs("00000100"), imm16])
# LDCB Rn,abs16 - 1111_nnnn_0001_0100 aaaa_aaaa_aaaa_aaaa
addop("LDCB", [bs("1111"), reg04, bs("00010100"), imm16])
# SBCPA CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_0000_iiii_iiii
addop("SBCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100000000"), imm8])
# SHCPA CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_0000_iiii_iii0
addop("SHCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100010000"), imm8_align2, bs("0")])
# SWCPA CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_0000_iiii_ii00
addop("SWCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100100000"), imm8_align4, bs("00")])
# SMCPA CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_0000_iiii_i000
addop("SMCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100110000"), imm8_align8, bs("000")])
# LBCPA CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_0000_iiii_iiii
addop("LBCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101000000"), imm8])
# LHCPA CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_0000_iiii_iii0
addop("LHCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101010000"), imm8_align2, bs("0")])
# LWCPA CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_0000_iiii_ii00
addop("LWCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101100000"), imm8_align4, bs("00")])
# LMCPA CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_0000_iiii_i000
addop("LMCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101110000"), imm8_align8, bs("000")])
# SBCPM0 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_1000_iiii_iiii
addop("SBCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100001000"), imm8])
# SHCPM0 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_1000_iiii_iii0
addop("SHCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100011000"), imm8_align2, bs("0")])
# SWCPM0 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_1000_iiii_ii00
addop("SWCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100101000"), imm8_align4, bs("00")])
# SMCPM0 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_1000_iiii_i000
addop("SMCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100111000"), imm8_align8, bs("000")])
# LBCPM0 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_1000_iiii_iiii
addop("LBCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101001000"), imm8])
# LHCPM0 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_1000_iiii_iii0
addop("LHCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101011000"), imm8_align2, bs("0")])
# LWCPM0 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_1000_iiii_ii00
addop("LWCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101101000"), imm8_align4, bs("00")])
# LMCPM0 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_1000_iiii_i000
addop("LMCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101111000"), imm8_align8, bs("000")])
# SBCPM1 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_1100_iiii_iiii
addop("SBCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100001100"), imm8])
# SHCPM1 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_1100_iiii_iii0
addop("SHCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100011100"), imm8_align2, bs("0")])
# SWCPM1 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_1100_iiii_ii00
addop("SWCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100101100"), imm8_align4, bs("00")])
# SMCPM1 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_1100_iiii_i000
addop("SMCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100111100"), imm8_align8, bs("000")])
# LBCPM1 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_1100_iiii_iiii
addop("LBCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101001100"), imm8])
# LHCPM1 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_1100_iiii_iii0
addop("LHCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101011100"), imm8_align2, bs("0")])
# LWCPM1 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_1100_iiii_ii00
addop("LWCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101101100"), imm8_align4, bs("00")])
# LMCPM1 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_1100_iiii_i000
addop("LMCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101111100"), imm8_align8, bs("000")])
# (RI) - 1111_xxxx_xxxx_0110 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("0110"), imm16])
# CP code24 - 1111_CCCC_CCCC_0111 cccc_cccc_cccc_cccc
#addop("CP", [bs("1111"), imm8_noarg, bs("0111"), imm_code24], [imm_code24])
# Note: CP & CMOV* look exactly the same. This is ambiguous, and prevents
#       them from being correctly disassembled. CP was arbitrarily disabled.
# CP code56 - 1111_CCCC_CCCC_0111 cccc_cccc_cccc_cccc cccc_cccc_cccc_cccc
# 64-bit VLIW operation mode - not implemented
# CMOV CRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_0000
#addop("CMOV", [bs("1111"), copro_reg04, reg04, bs("01111111000000000000")])
# CMOV Rm,CRn - 1111_nnnn_mmmm_0111 1111_0000_0000_0001
#addop("CMOV", [bs("1111"), copro_reg04, reg04, bs("01111111000000000001")], [reg04, copro_reg04])
# CMOVC CCRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_NN10
# CRn=NNnnnn
addop("CMOVC", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg06, bs("10")], [copro_reg06, reg04])
# CMOVC Rm,CCRn - 1111_nnnn_mmmm_0111 1111_0000_0000_NN11
# CRn=NNnnnn
addop("CMOVC", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg06, bs("11")], [reg04, copro_reg06])
# CMOVH CRn,Rm - 1111_nnnn_mmmm_0111 1111_0001_0000_0000
#addop("CMOVH", [bs("1111"), copro_reg04, reg04, bs("01111111000100000000")])
# CMOVH Rm,CRn - 1111_nnnn_mmmm_0111 1111_0001_0000_0001
#addop("CMOVH", [bs("1111"), copro_reg04, reg04, bs("01111111000100000001")], [reg04, copro_reg04])
# Note: the following CMOV* instructions are extensions used when the processor
#       has more than 16 coprocessor general-purpose registers. They can be
#       used to assemble and disassemble both CMOV* instruction sets.
# CMOV CRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_N000
# CRn=Nnnnn
addop("CMOV", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg05, bs("000")], [copro_reg05, reg04])
# CMOV Rm,CRn - 1111_nnnn_mmmm_0111 1111_0000_0000_N001
addop("CMOV", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg05, bs("001")], [reg04, copro_reg05])
# CMOVH CRn,Rm - 1111_nnnn_mmmm_0111 1111_0001_0000_N000
addop("CMOVH", [bs("1111"), imm4_noarg, reg04, bs("0111111100010000"), copro_reg05, bs("000")], [copro_reg05, reg04])
# CMOVH Rm,CRn - 1111_nnnn_mmmm_0111 1111_0001_0000_N001
addop("CMOVH", [bs("1111"), imm4_noarg, reg04, bs("0111111100010000"), copro_reg05, bs("001")], [reg04, copro_reg05])
# (RI) - 1111_xxxx_xxxx_10xx xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("10"), imm18])
# SWCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1100 dddd_dddd_dddd_dddd
addop("SWCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1100"), disp16_reg_deref], [copro_reg04, disp16_reg_deref])
# LWCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1101 dddd_dddd_dddd_dddd
addop("LWCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1101"), disp16_reg_deref], [copro_reg04, disp16_reg_deref, reg04_deref])
# SMCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1110 dddd_dddd_dddd_dddd
addop("SMCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1110"), disp16_reg_deref], [copro_reg04, disp16_reg_deref, reg04_deref])
# LMCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1111 dddd_dddd_dddd_dddd
addop("LMCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1111"), disp16_reg_deref], [copro_reg04, disp16_reg_deref])
| gpl-2.0 | 5,419,627,185,831,486,000 | 31.19883 | 135 | 0.601268 | false |
CodigoSur/cyclope | cyclope/apps/social/frontend_views.py | 2 | 1653 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
from django.core.paginator import Paginator
from actstream.models import Action, target_stream, user_stream
from cyclope.core import frontend
import cyclope.utils
from models import Social
class GlobalActivity(frontend.FrontendView):
name = 'global_activity'
verbose_name = _("Global activity or user's feed if authenticated")
is_default = True
is_instance_view = False
is_region_view = False
is_content_view = True
template = "social/actions_list.html"
def get_response(self, request, req_context, options):
actions = self.get_actions(request)
page = self.build_page(request, actions)
return render_to_string(self.template, {
'page': page,
}, req_context)
def get_actions(self, request):
if request.user.is_authenticated():
actions = target_stream(request.user) | user_stream(request.user)
else:
actions = Action.objects.public()
return actions
def build_page(self, request, actions):
paginator = Paginator(actions, per_page=10)
page = cyclope.utils.get_page(paginator, request)
return page
class GlobalOnlyActivity(GlobalActivity):
name = 'global_only_activity'
verbose_name = _('Global activity of the site')
is_default = False
def get_actions(self, request):
return Action.objects.public()
frontend.site.register_view(Social, GlobalActivity)
frontend.site.register_view(Social, GlobalOnlyActivity)
| gpl-3.0 | -3,396,516,659,143,593 | 31.411765 | 77 | 0.69268 | false |
ahamilton55/ansible | lib/ansible/module_utils/facts/system/selinux.py | 52 | 3061 | # Collect facts related to selinux
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.collector import BaseFactCollector
try:
import selinux
HAVE_SELINUX = True
except ImportError:
HAVE_SELINUX = False
SELINUX_MODE_DICT = {1: 'enforcing',
0: 'permissive',
-1: 'disabled'}
class SelinuxFactCollector(BaseFactCollector):
name = 'selinux'
_fact_ids = set()
def collect(self, module=None, collected_facts=None):
facts_dict = {}
selinux_facts = {}
# This is weird. The value of the facts 'selinux' key can be False or a dict
if not HAVE_SELINUX:
facts_dict['selinux'] = False
facts_dict['selinux_python_present'] = False
return facts_dict
facts_dict['selinux_python_present'] = True
if not selinux.is_selinux_enabled():
selinux_facts['status'] = 'disabled'
# NOTE: this could just return in the above clause and the rest of this is up an indent -akl
else:
selinux_facts['status'] = 'enabled'
try:
selinux_facts['policyvers'] = selinux.security_policyvers()
except (AttributeError, OSError):
selinux_facts['policyvers'] = 'unknown'
try:
(rc, configmode) = selinux.selinux_getenforcemode()
if rc == 0:
selinux_facts['config_mode'] = SELINUX_MODE_DICT.get(configmode, 'unknown')
else:
selinux_facts['config_mode'] = 'unknown'
except (AttributeError, OSError):
selinux_facts['config_mode'] = 'unknown'
try:
mode = selinux.security_getenforce()
selinux_facts['mode'] = SELINUX_MODE_DICT.get(mode, 'unknown')
except (AttributeError, OSError):
selinux_facts['mode'] = 'unknown'
try:
(rc, policytype) = selinux.selinux_getpolicytype()
if rc == 0:
selinux_facts['type'] = policytype
else:
selinux_facts['type'] = 'unknown'
except (AttributeError, OSError):
selinux_facts['type'] = 'unknown'
facts_dict['selinux'] = selinux_facts
return facts_dict
| gpl-3.0 | -387,949,802,047,458,100 | 34.593023 | 100 | 0.598171 | false |
Feverup/ansible-modules-extras | cloud/vmware/vmware_target_canonical_facts.py | 16 | 3194 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: vmware_target_canonical_facts
short_description: Return canonical (NAA) from an ESXi host
description:
- Return canonical (NAA) from an ESXi host based on SCSI target ID
version_added: "2.0"
author: Joseph Callen
notes:
    - Tested on vSphere 5.5
requirements:
    - PyVmomi installed
options:
hostname:
description:
- The hostname or IP address of the vSphere vCenter
required: True
username:
description:
- The username of the vSphere vCenter
required: True
aliases: ['user', 'admin']
password:
description:
- The password of the vSphere vCenter
required: True
aliases: ['pass', 'pwd']
target_id:
description:
- The target id based on order of scsi device
required: True
'''
EXAMPLES = '''
# Example vmware_target_canonical_facts command from Ansible Playbooks
- name: Get Canonical name
local_action: >
vmware_target_canonical_facts
hostname="{{ ansible_ssh_host }}" username=root password=vmware
target_id=7
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def find_hostsystem(content):
host_system = get_all_objs(content, [vim.HostSystem])
for host in host_system:
return host
return None
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(target_id=dict(required=True, type='int')))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
content = connect_to_api(module)
host = find_hostsystem(content)
target_lun_uuid = {}
scsilun_canonical = {}
# Associate the scsiLun key with the canonicalName (NAA)
for scsilun in host.config.storageDevice.scsiLun:
scsilun_canonical[scsilun.key] = scsilun.canonicalName
# Associate target number with LUN uuid
for target in host.config.storageDevice.scsiTopology.adapter[0].target:
for lun in target.lun:
target_lun_uuid[target.target] = lun.scsiLun
module.exit_json(changed=False, canonical=scsilun_canonical[target_lun_uuid[module.params['target_id']]])
from ansible.module_utils.basic import *
from ansible.module_utils.vmware import *
if __name__ == '__main__':
main()
| gpl-3.0 | 4,996,045,332,766,970,000 | 28.574074 | 109 | 0.68253 | false |
cstipkovic/spidermonkey-research | testing/mozbase/moznetwork/moznetwork/moznetwork.py | 1 | 5591 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import array
import re
import socket
import struct
import subprocess
import sys
import mozinfo
import mozlog
import mozlog.commandline
if mozinfo.isLinux:
import fcntl
class NetworkError(Exception):
"""Exception thrown when unable to obtain interface or IP."""
def _get_logger():
logger = mozlog.get_default_logger(component='moznetwork')
if not logger:
logger = mozlog.unstructured.getLogger('moznetwork')
return logger
def _get_interface_list():
"""Provides a list of available network interfaces
as a list of tuples (name, ip)"""
logger = _get_logger()
logger.debug('Gathering interface list')
    max_iface = 32  # Maximum number of interfaces (arbitrary)
    bytes = max_iface * 32
    is_32bit = (8 * struct.calcsize("P")) == 32  # Detect 32- vs 64-bit architecture
    struct_size = 32 if is_32bit else 40
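    # sizeof(struct ifreq) is 32 bytes on 32-bit Linux and 40 bytes on
    # 64-bit Linux (the embedded pointer union is padded to 8 bytes),
    # hence the two struct sizes above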
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
names = array.array('B', '\0' * bytes)
outbytes = struct.unpack('iL', fcntl.ioctl(
s.fileno(),
0x8912, # SIOCGIFCONF
struct.pack('iL', bytes, names.buffer_info()[0])
))[0]
namestr = names.tostring()
return [(namestr[i:i + 32].split('\0', 1)[0],
socket.inet_ntoa(namestr[i + 20:i + 24]))
for i in range(0, outbytes, struct_size)]
except IOError:
raise NetworkError('Unable to call ioctl with SIOCGIFCONF')
def _proc_matches(args, regex):
"""Helper returns the matches of regex in the output of a process created with
the given arguments"""
output = subprocess.Popen(args=args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.read()
return re.findall(regex, output)
def _parse_ifconfig():
"""Parse the output of running ifconfig on mac in cases other methods
have failed"""
logger = _get_logger()
logger.debug('Parsing ifconfig')
# Attempt to determine the default interface in use.
default_iface = _proc_matches(['route', '-n', 'get', 'default'],
'interface: (\w+)')
if default_iface:
addr_list = _proc_matches(['ifconfig', default_iface[0]],
'inet (\d+.\d+.\d+.\d+)')
if addr_list:
logger.debug('Default interface: [%s] %s' % (default_iface[0],
addr_list[0]))
if not addr_list[0].startswith('127.'):
return addr_list[0]
# Iterate over plausible interfaces if we didn't find a suitable default.
for iface in ['en%s' % i for i in range(10)]:
addr_list = _proc_matches(['ifconfig', iface],
'inet (\d+.\d+.\d+.\d+)')
if addr_list:
logger.debug('Interface: [%s] %s' % (iface, addr_list[0]))
if not addr_list[0].startswith('127.'):
return addr_list[0]
# Just return any that isn't localhost. If we can't find one, we have
# failed.
addrs = _proc_matches(['ifconfig'],
'inet (\d+.\d+.\d+.\d+)')
try:
return [addr for addr in addrs if not addr.startswith('127.')][0]
except IndexError:
return None
def get_ip():
"""Provides an available network interface address, for example
"192.168.1.3".
A `NetworkError` exception is raised in case of failure."""
logger = _get_logger()
try:
hostname = socket.gethostname()
try:
logger.debug('Retrieving IP for %s' % hostname)
ips = socket.gethostbyname_ex(hostname)[2]
except socket.gaierror: # for Mac OS X
hostname += '.local'
logger.debug('Retrieving IP for %s' % hostname)
ips = socket.gethostbyname_ex(hostname)[2]
if len(ips) == 1:
ip = ips[0]
elif len(ips) > 1:
logger.debug('Multiple addresses found: %s' % ips)
# no fallback on Windows so take the first address
ip = ips[0] if mozinfo.isWin else None
else:
ip = None
except socket.gaierror:
# sometimes the hostname doesn't resolve to an ip address, in which
# case this will always fail
ip = None
if ip is None or ip.startswith("127."):
if mozinfo.isLinux:
interfaces = _get_interface_list()
for ifconfig in interfaces:
logger.debug('Interface: [%s] %s' % (ifconfig[0], ifconfig[1]))
if ifconfig[0] == 'lo':
continue
else:
return ifconfig[1]
elif mozinfo.isMac:
ip = _parse_ifconfig()
if ip is None:
raise NetworkError('Unable to obtain network address')
return ip
def get_lan_ip():
"""Deprecated. Please use get_ip() instead."""
return get_ip()
def cli(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description='Retrieve IP address')
    mozlog.commandline.add_logging_group(
        parser,
        include_formatters=mozlog.commandline.TEXT_FORMATTERS
    )
    args = parser.parse_args()
    mozlog.commandline.setup_logging(
        'moznetwork', args, {'mach': sys.stdout})
_get_logger().info('IP address: %s' % get_ip())
if __name__ == '__main__':
cli()
| mpl-2.0 | -1,904,276,421,798,624,300 | 31.505814 | 82 | 0.572706 | false |
hamzehd/edx-platform | lms/djangoapps/mobile_api/video_outlines/views.py | 121 | 4888 | """
Video Outlines
We only provide the listing view for a video outline, and video outlines are
only displayed at the course level. This is because it makes it a lot easier to
optimize and reason about, and it avoids having to tackle the bigger problem of
general XBlock representation in this rather specialized formatting.
"""
from functools import partial
from django.http import Http404, HttpResponse
from mobile_api.models import MobileApiConfig
from rest_framework import generics
from rest_framework.response import Response
from opaque_keys.edx.locator import BlockUsageLocator
from xmodule.exceptions import NotFoundError
from xmodule.modulestore.django import modulestore
from ..utils import mobile_view, mobile_course_access
from .serializers import BlockOutline, video_summary
@mobile_view()
class VideoSummaryList(generics.ListAPIView):
"""
**Use Case**
Get a list of all videos in the specified course. You can use the
video_url value to access the video file.
**Example Request**
GET /api/mobile/v0.5/video_outlines/courses/{organization}/{course_number}/{course_run}
**Response Values**
If the request is successful, the request returns an HTTP 200 "OK"
response along with an array of videos in the course. The array
includes the following information for each video.
* named_path: An array that consists of the display names of the
courseware objects in the path to the video.
* path: An array that specifies the complete path to the video in
the courseware hierarchy. The array contains the following
values.
* category: The type of division in the course outline.
Possible values are "chapter", "sequential", and "vertical".
* name: The display name for the object.
          * id: The unique identifier for the video.
* section_url: The URL to the first page of the section that
contains the video in the Learning Management System.
* summary: An array of data about the video that includes the
following values.
* category: The type of component. This value will always be "video".
* duration: The length of the video, if available.
* id: The unique identifier for the video.
* language: The language code for the video.
* name: The display name of the video.
* size: The size of the video file.
* transcripts: An array of language codes and URLs to available
video transcripts. Use the URL value to access a transcript
for the video.
* video_thumbnail_url: The URL to the thumbnail image for the
video, if available.
* video_url: The URL to the video file. Use this value to access
the video.
* unit_url: The URL to the unit that contains the video in the Learning
Management System.
"""
@mobile_course_access(depth=None)
def list(self, request, course, *args, **kwargs):
video_profiles = MobileApiConfig.get_video_profiles()
video_outline = list(
BlockOutline(
course.id,
course,
{"video": partial(video_summary, video_profiles)},
request,
video_profiles,
)
)
return Response(video_outline)
@mobile_view()
class VideoTranscripts(generics.RetrieveAPIView):
"""
**Use Case**
Get a transcript for a specified video and language.
**Example request**
GET /api/mobile/v0.5/video_outlines/transcripts/{organization}/{course_number}/{course_run}/{video ID}/{language code}
**Response Values**
If the request is successful, the request returns an HTTP 200 "OK"
response along with an .srt file that you can download.
"""
@mobile_course_access()
def get(self, request, course, *args, **kwargs):
block_id = kwargs['block_id']
lang = kwargs['lang']
usage_key = BlockUsageLocator(
course.id, block_type="video", block_id=block_id
)
try:
video_descriptor = modulestore().get_item(usage_key)
transcripts = video_descriptor.get_transcripts_info()
content, filename, mimetype = video_descriptor.get_transcript(transcripts, lang=lang)
except (NotFoundError, ValueError, KeyError):
raise Http404(u"Transcript not found for {}, lang: {}".format(block_id, lang))
response = HttpResponse(content, content_type=mimetype)
response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename.encode('utf-8'))
return response
| agpl-3.0 | -3,024,214,260,666,718,000 | 37.1875 | 126 | 0.639321 | false |
ingokegel/intellij-community | python/testData/inspections/PyTypeCheckerInspection/Generator.py | 30 | 3112 | def test():
def gen(n):
for x in xrange(n):
yield str(x)
def f_1(xs):
"""
:type xs: list of int
"""
return xs
def f_2(xs):
"""
:type xs: collections.Sequence of int
"""
return xs
def f_3(xs):
"""
:type xs: collections.Container of int
"""
return xs
def f_4(xs):
"""
:type xs: collections.Iterator of int
"""
return xs
def f_5(xs):
"""
:type xs: collections.Iterable of int
"""
return xs
def f_6(xs):
"""
:type xs: list
"""
return xs
def f_7(xs):
"""
:type xs: collections.Sequence
"""
return xs
def f_8(xs):
"""
:type xs: collections.Container
"""
return xs
def f_9(xs):
"""
:type xs: collections.Iterator
"""
return xs
def f_10(xs):
"""
:type xs: collections.Iterable
"""
return xs
def f_11(xs):
"""
:type xs: list of string
"""
return xs
def f_12(xs):
"""
:type xs: collections.Sequence of string
"""
return xs
def f_13(xs):
"""
:type xs: collections.Container of string
"""
return xs
def f_14(xs):
"""
:type xs: collections.Iterator of string
"""
return xs
def f_15(xs):
"""
:type xs: collections.Iterable of string
"""
return xs
return [
''.join(gen(10)),
f_1(<warning descr="Expected type 'List[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_2(<warning descr="Expected type 'Sequence[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_3(<warning descr="Expected type 'Container[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_4(<warning descr="Expected type 'Iterator[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_5(<warning descr="Expected type 'Iterable[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_6(<warning descr="Expected type 'list', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_7(<warning descr="Expected type 'Sequence', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_8(<warning descr="Expected type 'Container', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_9(gen(11)),
f_10(gen(11)),
f_11(<warning descr="Expected type 'List[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_12(<warning descr="Expected type 'Sequence[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_13(<warning descr="Expected type 'Container[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_14(gen(11)),
f_15(gen(11)),
f_15('foo'.split('o')),
]
| apache-2.0 | 7,211,044,012,061,668,000 | 30.755102 | 137 | 0.514781 | false |
xzregg/yunwei | yw/core/bf.py | 2 | 3673 | #!/usr/bin/env python
# coding:utf-8
# A class for running jobs concurrently with processes and threads, by xzr
import multiprocessing
import time
import subprocess
import os
import sys
import traceback
import threading
#import Queue
def get_now():
tf = '%Y-%m-%d %H:%M:%S'
return time.strftime(tf, time.localtime())
_Cpus = multiprocessing.cpu_count()
class xBF:
'''
    A class for running jobs concurrently with processes and threads.
    @f           function to run
    @tuplist     list of argument tuples for the function
    @bfn         limit on the number of concurrent tasks
    @threadnums  number of threads started by each process
    @printret    print each result as it completes
'''
def __init__(self, f=None, tuplist=[], bfn=0, printret=False):
self.cpus = _Cpus
bfn = int(bfn)
self.bfn = bfn or 150
self.Manager = multiprocessing.Manager()
self.retdict = self.Manager.dict()
self.Q = multiprocessing.Queue()
self.printret = printret
self.funcs = []
self.mul = threading.Thread
        if f and tuplist:  # a function was supplied at construction time
            for n, o in enumerate(tuplist):
                n += 1  # task indices start at 1
                process = self.mul(
                    target=self.get_fun_ret, args=(n, f, o))
self.funcs.append(process)
def append(self, f, opttup):
n = len(self.funcs) + 1
process = self.mul(target=self.get_fun_ret, args=(n, f, opttup))
self.funcs.append(process)
def get_fun_ret(self, n, f, tup):
self.retdict[n] = f(*tup)
if self.printret == True:
print '<%s>\n%s' % (n, self.retdict[n])
    def startprocess(self, threadjobs, n):  # each worker process runs its batch of threads
for t in threadjobs:
t.start()
for t in threadjobs:
t.join()
self.Q.put(n)
def start(self, Print=True):
stime = get_now()
tp = len(self.funcs)
ltp = min(tp, self.bfn)
        # Split the threads by the task limit (or task count) so that the
        # total thread count across the cpus processes approaches the limit.
self.threadnums = ltp / self.cpus or 1
self.threadnums += 1 if ltp % self.cpus else 0
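        # Illustrative arithmetic (hypothetical numbers, not from this run):
        # with tp=40 tasks, bfn=12 and cpus=4, ltp = min(40, 12) = 12, so
        # threadnums = 12 / 4 = 3 threads per process; the remainder is 0,
        # so no extra thread is added.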
GroupbyPl = [self.funcs[i:i + self.threadnums]
for i in xrange(0, tp, self.threadnums)]
pp = []
for i, threadjobs in enumerate(GroupbyPl):
process = multiprocessing.Process(
target=self.startprocess, args=(threadjobs, i))
pp.append(process)
process.start()
if i >= self.cpus - 1:
n = self.Q.get()
if n != None:
pp[n].join()
for p in pp:
p.join()
        if Print:
            print '[%s]' % stime, '-' * 30, 'tasks: %s cap: %s processes: %s threads/process: %s started' % (tp, self.bfn, self.cpus, self.threadnums)
            print '[%s]' % get_now(), '-' * 30, 'tasks: %s cap: %s processes: %s threads/process: %s finished' % (tp, self.bfn, self.cpus, self.threadnums)
def dict(self):
d = dict(self.retdict)
self.Manager.shutdown()
return d
def Print(self):
        print 'tasks: %s cap: %s processes: %s threads/process: %s' % (len(self.funcs), self.bfn, self.cpus, self.threadnums)
def f(a, b):
time.sleep(10)
return 'f' + str(a) + str(b)
def test(x):
time.sleep(1)
# print x
return x
def test1(x):
time.sleep(1)
return x
if __name__ == "__main__":
# a=xBF(bfn=9,thread=False,look=False)
# a=xBF(bfn=1220,threadnums=100,look=False)
    a = xBF(bfn=12)  # __init__ takes no 'look' kwarg; the commented calls above use an older signature
for x in xrange(40):
a.append(test, (x,))
a.start()
a.Print()
dd = a.dict()
# for k in dd:
# print'<%s>' % k,'-'*20
# print dd[k]
| lgpl-3.0 | 5,872,661,661,233,947,000 | 25.395349 | 127 | 0.525404 | false |
cloudbase/maas | src/maasserver/tests/test_messages.py | 1 | 6341 | # Copyright 2012, 2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test maasserver messages."""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
str = None
__metaclass__ = type
__all__ = []
import json
import socket
from maasserver.exceptions import NoRabbit
from maasserver.messages import (
MAASMessenger,
MESSENGER_EVENT,
MessengerBase,
)
from maasserver.models import Node
from maasserver.testing.factory import factory
from maasserver.testing.testcase import MAASServerTestCase
from maasserver.tests.models import MessagesTestModel
from maastesting.djangotestcase import TestModelMixin
class FakeProducer:
"""A fake RabbitProducer that simply records published messages."""
def __init__(self):
self.messages = []
def publish(self, message):
self.messages.append(message)
class TestMessenger(MessengerBase):
def create_msg(self, event_name, instance):
return [event_name, instance]
class MessengerBaseTest(TestModelMixin, MAASServerTestCase):
app = 'maasserver.tests'
def test_update_obj_publishes_message_if_created(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.update_obj(MessagesTestModel, instance, True)
self.assertEqual(
[[MESSENGER_EVENT.CREATED, instance]], producer.messages)
def test_update_obj_publishes_message_if_not_created(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.update_obj(MessagesTestModel, instance, False)
self.assertEqual(
[[MESSENGER_EVENT.UPDATED, instance]], producer.messages)
def test_delete_obj_publishes_message(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.delete_obj(MessagesTestModel, instance)
self.assertEqual(
[[MESSENGER_EVENT.DELETED, instance]], producer.messages)
def test_register_registers_update_signal(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
messenger.register()
obj.save()
self.assertEqual(
[[MESSENGER_EVENT.UPDATED, obj]], producer.messages)
def test_register_registers_created_signal(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
messenger.register()
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
self.assertEqual(
[[MESSENGER_EVENT.CREATED, obj]], producer.messages)
def test_register_registers_delete_signal(self):
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
messenger.register()
obj.delete()
self.assertEqual(
[[MESSENGER_EVENT.DELETED, obj]], producer.messages)
def test_publish_message_publishes_message(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.publish_message(messenger.create_msg(event, instance))
self.assertEqual([[event, instance]], messenger.producer.messages)
def test_publish_message_swallows_missing_rabbit(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
def fail_for_lack_of_rabbit(*args, **kwargs):
raise NoRabbit("I'm pretending not to have a RabbitMQ.")
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.producer.publish = fail_for_lack_of_rabbit
messenger.publish_message(messenger.create_msg(event, instance))
self.assertEqual([], messenger.producer.messages)
def test_publish_message_propagates_exceptions(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
def fail_despite_having_a_rabbit(*args, **kwargs):
raise socket.error("I have a rabbit but I fail anyway.")
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.producer.publish = fail_despite_having_a_rabbit
self.assertRaises(
socket.error,
messenger.publish_message, messenger.create_msg(event, instance))
self.assertEqual([], messenger.producer.messages)
class MAASMessengerTest(TestModelMixin, MAASServerTestCase):
app = 'maasserver.tests'
def test_event_key(self):
producer = FakeProducer()
event_name = factory.getRandomString()
obj = MessagesTestModel(name=factory.getRandomString())
messenger = MAASMessenger(MessagesTestModel, producer)
self.assertEqual(
'%s.%s' % ('MessagesTestModel', event_name),
messenger.event_key(event_name, obj))
def test_create_msg(self):
producer = FakeProducer()
messenger = MAASMessenger(Node, producer)
event_name = factory.getRandomString()
obj_name = factory.getRandomString()
obj = MessagesTestModel(name=obj_name)
obj.save()
msg = messenger.create_msg(event_name, obj)
decoded_msg = json.loads(msg)
self.assertItemsEqual(['instance', 'event_key'], list(decoded_msg))
self.assertItemsEqual(
['id', 'name'], list(decoded_msg['instance']))
self.assertEqual(
obj_name, decoded_msg['instance']['name'])
def test_msg_containing_node_representation(self):
node = factory.make_node()
messenger = MAASMessenger(Node, FakeProducer())
msg = messenger.create_msg(factory.getRandomString(), node)
decoded_msg = json.loads(msg)
self.assertItemsEqual(['instance', 'event_key'], list(decoded_msg))
| agpl-3.0 | -3,485,687,752,995,680,000 | 35.234286 | 77 | 0.677653 | false |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/scipy/linalg/basic.py | 9 | 39330 | #
# Author: Pearu Peterson, March 2002
#
# w/ additions by Travis Oliphant, March 2002
# and Jake Vanderplas, August 2012
from __future__ import division, print_function, absolute_import
__all__ = ['solve', 'solve_triangular', 'solveh_banded', 'solve_banded',
'solve_toeplitz', 'solve_circulant', 'inv', 'det', 'lstsq',
'pinv', 'pinv2', 'pinvh']
import warnings
import numpy as np
from .flinalg import get_flinalg_funcs
from .lapack import get_lapack_funcs, _compute_lwork
from .misc import LinAlgError, _datacopied
from .decomp import _asarray_validated
from . import decomp, decomp_svd
from ._solve_toeplitz import levinson
# Linear equations
def solve(a, b, sym_pos=False, lower=False, overwrite_a=False,
overwrite_b=False, debug=False, check_finite=True):
"""
Solve the equation ``a x = b`` for ``x``.
Parameters
----------
a : (M, M) array_like
A square matrix.
b : (M,) or (M, N) array_like
Right-hand side matrix in ``a x = b``.
sym_pos : bool, optional
Assume `a` is symmetric and positive definite.
lower : bool, optional
Use only data contained in the lower triangle of `a`, if `sym_pos` is
true. Default is to use upper triangle.
overwrite_a : bool, optional
Allow overwriting data in `a` (may enhance performance).
Default is False.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance).
Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, N) ndarray
Solution to the system ``a x = b``. Shape of the return matches the
shape of `b`.
Raises
------
LinAlgError
If `a` is singular.
ValueError
If `a` is not square
Examples
--------
Given `a` and `b`, solve for `x`:
>>> a = np.array([[3, 2, 0], [1, -1, 0], [0, 5, 1]])
>>> b = np.array([2, 4, -1])
>>> from scipy import linalg
>>> x = linalg.solve(a, b)
>>> x
array([ 2., -2., 9.])
>>> np.dot(a, x) == b
array([ True, True, True], dtype=bool)
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
if a1.shape[0] != b1.shape[0]:
raise ValueError('incompatible dimensions')
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if debug:
print('solve:overwrite_a=', overwrite_a)
print('solve:overwrite_b=', overwrite_b)
if sym_pos:
posv, = get_lapack_funcs(('posv',), (a1, b1))
c, x, info = posv(a1, b1, lower=lower,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
else:
gesv, = get_lapack_funcs(('gesv',), (a1, b1))
lu, piv, x, info = gesv(a1, b1, overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix")
raise ValueError('illegal value in %d-th argument of internal gesv|posv' %
-info)
def solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,
overwrite_b=False, debug=False, check_finite=True):
"""
Solve the equation `a x = b` for `x`, assuming a is a triangular matrix.
Parameters
----------
a : (M, M) array_like
A triangular matrix
b : (M,) or (M, N) array_like
Right-hand side matrix in `a x = b`
lower : bool, optional
Use only data contained in the lower triangle of `a`.
Default is to use upper triangle.
trans : {0, 1, 2, 'N', 'T', 'C'}, optional
Type of system to solve:
======== =========
trans system
======== =========
0 or 'N' a x = b
1 or 'T' a^T x = b
2 or 'C' a^H x = b
======== =========
unit_diagonal : bool, optional
If True, diagonal elements of `a` are assumed to be 1 and
will not be referenced.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, N) ndarray
Solution to the system `a x = b`. Shape of return matches `b`.
Raises
------
LinAlgError
If `a` is singular
Notes
-----
.. versionadded:: 0.9.0
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
if a1.shape[0] != b1.shape[0]:
raise ValueError('incompatible dimensions')
overwrite_b = overwrite_b or _datacopied(b1, b)
if debug:
print('solve:overwrite_b=', overwrite_b)
trans = {'N': 0, 'T': 1, 'C': 2}.get(trans, trans)
trtrs, = get_lapack_funcs(('trtrs',), (a1, b1))
x, info = trtrs(a1, b1, overwrite_b=overwrite_b, lower=lower,
trans=trans, unitdiag=unit_diagonal)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix: resolution failed at diagonal %s" %
info-1)
raise ValueError('illegal value in %d-th argument of internal trtrs' %
-info)
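# A minimal usage sketch for solve_triangular (illustrative values, not part
# of the library's test suite):
#
#     >>> import numpy as np
#     >>> a = np.array([[3., 0., 0.], [2., 1., 0.], [1., 0., 1.]])
#     >>> b = np.array([6., 4., 2.])
#     >>> solve_triangular(a, b, lower=True)
#     array([ 2.,  0.,  0.])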
def solve_banded(l_and_u, ab, b, overwrite_ab=False, overwrite_b=False,
debug=False, check_finite=True):
"""
Solve the equation a x = b for x, assuming a is banded matrix.
The matrix a is stored in `ab` using the matrix diagonal ordered form::
ab[u + i - j, j] == a[i,j]
Example of `ab` (shape of a is (6,6), `u` =1, `l` =2)::
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Parameters
----------
(l, u) : (integer, integer)
Number of non-zero lower and upper diagonals
ab : (`l` + `u` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system a x = b. Returned shape depends on the
shape of `b`.
"""
a1 = _asarray_validated(ab, check_finite=check_finite, as_inexact=True)
b1 = _asarray_validated(b, check_finite=check_finite, as_inexact=True)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
(l, u) = l_and_u
if l + u + 1 != a1.shape[0]:
raise ValueError("invalid values for the number of lower and upper "
"diagonals: l+u+1 (%d) does not equal ab.shape[0] "
"(%d)" % (l+u+1, ab.shape[0]))
overwrite_b = overwrite_b or _datacopied(b1, b)
if a1.shape[-1] == 1:
b2 = np.array(b1, copy=overwrite_b)
b2 /= a1[1, 0]
return b2
if l == u == 1:
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
gtsv, = get_lapack_funcs(('gtsv',), (a1, b1))
du = a1[0, 1:]
d = a1[1, :]
dl = a1[2, :-1]
du2, d, du, x, info = gtsv(dl, d, du, b1, overwrite_ab, overwrite_ab,
overwrite_ab, overwrite_b)
else:
gbsv, = get_lapack_funcs(('gbsv',), (a1, b1))
a2 = np.zeros((2*l+u+1, a1.shape[1]), dtype=gbsv.dtype)
a2[l:, :] = a1
lu, piv, x, info = gbsv(l, u, a2, b1, overwrite_ab=True,
overwrite_b=overwrite_b)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix")
raise ValueError('illegal value in %d-th argument of internal gbsv/gtsv' %
-info)
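# A minimal usage sketch for solve_banded on a tridiagonal system (l = u = 1);
# the values are illustrative. The matrix [[2, 1, 0], [1, 2, 1], [0, 1, 2]]
# is stored in diagonal ordered form:
#
#     >>> import numpy as np
#     >>> ab = np.array([[0., 1., 1.],
#     ...                [2., 2., 2.],
#     ...                [1., 1., 0.]])
#     >>> solve_banded((1, 1), ab, np.array([3., 4., 3.]))
#     array([ 1.,  1.,  1.])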
def solveh_banded(ab, b, overwrite_ab=False, overwrite_b=False, lower=False,
check_finite=True):
"""
Solve equation a x = b. a is Hermitian positive-definite banded matrix.
The matrix a is stored in `ab` either in lower diagonal or upper
diagonal ordered form:
ab[u + i - j, j] == a[i,j] (if upper form; i <= j)
ab[ i - j, j] == a[i,j] (if lower form; i >= j)
Example of `ab` (shape of a is (6, 6), `u` =2)::
upper form:
* * a02 a13 a24 a35
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
lower form:
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Cells marked with * are not used.
Parameters
----------
ab : (`u` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
lower : bool, optional
Is the matrix in the lower form. (Default is upper form)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system a x = b. Shape of return matches shape
of `b`.
"""
a1 = _asarray_validated(ab, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
overwrite_b = overwrite_b or _datacopied(b1, b)
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
if a1.shape[0] == 2:
ptsv, = get_lapack_funcs(('ptsv',), (a1, b1))
if lower:
d = a1[0, :].real
e = a1[1, :-1]
else:
d = a1[1, :].real
e = a1[0, 1:].conj()
d, du, x, info = ptsv(d, e, b1, overwrite_ab, overwrite_ab,
overwrite_b)
else:
pbsv, = get_lapack_funcs(('pbsv',), (a1, b1))
c, x, info = pbsv(a1, b1, lower=lower, overwrite_ab=overwrite_ab,
overwrite_b=overwrite_b)
if info > 0:
raise LinAlgError("%d-th leading minor not positive definite" % info)
if info < 0:
raise ValueError('illegal value in %d-th argument of internal pbsv' %
-info)
return x
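# A minimal usage sketch for solveh_banded (illustrative values): the same
# symmetric positive-definite tridiagonal matrix as above, stored in upper
# diagonal ordered form, so only two rows are needed:
#
#     >>> import numpy as np
#     >>> ab = np.array([[0., 1., 1.],
#     ...                [2., 2., 2.]])
#     >>> solveh_banded(ab, np.array([3., 4., 3.]))
#     array([ 1.,  1.,  1.])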
def solve_toeplitz(c_or_cr, b, check_finite=True):
"""Solve a Toeplitz system using Levinson Recursion
The Toeplitz matrix has constant diagonals, with c as its first column
and r as its first row. If r is not given, ``r == conjugate(c)`` is
assumed.
Parameters
----------
c_or_cr : array_like or tuple of (array_like, array_like)
The vector ``c``, or a tuple of arrays (``c``, ``r``). Whatever the
actual shape of ``c``, it will be converted to a 1-D array. If not
supplied, ``r = conjugate(c)`` is assumed; in this case, if c[0] is
real, the Toeplitz matrix is Hermitian. r[0] is ignored; the first row
of the Toeplitz matrix is ``[c[0], r[1:]]``. Whatever the actual shape
of ``r``, it will be converted to a 1-D array.
b : (M,) or (M, K) array_like
Right-hand side in ``T x = b``.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(result entirely NaNs) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system ``T x = b``. Shape of return matches shape
of `b`.
Notes
-----
The solution is computed using Levinson-Durbin recursion, which is faster
than generic least-squares methods, but can be less numerically stable.
"""
# If numerical stability of this algorithm is a problem, a future
# developer might consider implementing other O(N^2) Toeplitz solvers,
# such as GKO (http://www.jstor.org/stable/2153371) or Bareiss.
if isinstance(c_or_cr, tuple):
c, r = c_or_cr
c = _asarray_validated(c, check_finite=check_finite).ravel()
r = _asarray_validated(r, check_finite=check_finite).ravel()
else:
c = _asarray_validated(c_or_cr, check_finite=check_finite).ravel()
r = c.conjugate()
# Form a 1D array of values to be used in the matrix, containing a reversed
# copy of r[1:], followed by c.
vals = np.concatenate((r[-1:0:-1], c))
if b is None:
raise ValueError('illegal value, `b` is a required argument')
if vals.shape[0] != (2*b.shape[0] - 1):
raise ValueError('incompatible dimensions')
b = _asarray_validated(b)
if np.iscomplexobj(vals) or np.iscomplexobj(b):
vals = np.asarray(vals, dtype=np.complex128, order='c')
b = np.asarray(b, dtype=np.complex128)
else:
vals = np.asarray(vals, dtype=np.double, order='c')
b = np.asarray(b, dtype=np.double)
if b.ndim == 1:
x, _ = levinson(vals, np.ascontiguousarray(b))
else:
b_shape = b.shape
b = b.reshape(b.shape[0], -1)
x = np.column_stack(
(levinson(vals, np.ascontiguousarray(b[:, i]))[0])
for i in range(b.shape[1]))
x = x.reshape(*b_shape)
return x
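# A minimal usage sketch for solve_toeplitz (illustrative values): with first
# column c = [2, 1, 0] and r = conjugate(c) assumed, the implied Toeplitz
# matrix is again [[2, 1, 0], [1, 2, 1], [0, 1, 2]]:
#
#     >>> import numpy as np
#     >>> solve_toeplitz(np.array([2., 1., 0.]), np.array([3., 4., 3.]))
#     array([ 1.,  1.,  1.])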
def _get_axis_len(aname, a, axis):
ax = axis
if ax < 0:
ax += a.ndim
if 0 <= ax < a.ndim:
return a.shape[ax]
raise ValueError("'%saxis' entry is out of bounds" % (aname,))
def solve_circulant(c, b, singular='raise', tol=None,
caxis=-1, baxis=0, outaxis=0):
"""Solve C x = b for x, where C is a circulant matrix.
`C` is the circulant matrix associated with the vector `c`.
The system is solved by doing division in Fourier space. The
calculation is::
x = ifft(fft(b) / fft(c))
where `fft` and `ifft` are the fast Fourier transform and its inverse,
respectively. For a large vector `c`, this is *much* faster than
solving the system with the full circulant matrix.
Parameters
----------
c : array_like
The coefficients of the circulant matrix.
b : array_like
Right-hand side matrix in ``a x = b``.
singular : str, optional
This argument controls how a near singular circulant matrix is
handled. If `singular` is "raise" and the circulant matrix is
near singular, a `LinAlgError` is raised. If `singular` is
"lstsq", the least squares solution is returned. Default is "raise".
tol : float, optional
If any eigenvalue of the circulant matrix has an absolute value
that is less than or equal to `tol`, the matrix is considered to be
near singular. If not given, `tol` is set to::
tol = abs_eigs.max() * abs_eigs.size * np.finfo(np.float64).eps
where `abs_eigs` is the array of absolute values of the eigenvalues
of the circulant matrix.
caxis : int
When `c` has dimension greater than 1, it is viewed as a collection
of circulant vectors. In this case, `caxis` is the axis of `c` that
holds the vectors of circulant coefficients.
baxis : int
When `b` has dimension greater than 1, it is viewed as a collection
of vectors. In this case, `baxis` is the axis of `b` that holds the
right-hand side vectors.
outaxis : int
When `c` or `b` are multidimensional, the value returned by
`solve_circulant` is multidimensional. In this case, `outaxis` is
the axis of the result that holds the solution vectors.
Returns
-------
x : ndarray
Solution to the system ``C x = b``.
Raises
------
LinAlgError
If the circulant matrix associated with `c` is near singular.
See Also
--------
circulant
Notes
-----
For a one-dimensional vector `c` with length `m`, and an array `b`
with shape ``(m, ...)``,
solve_circulant(c, b)
returns the same result as
solve(circulant(c), b)
where `solve` and `circulant` are from `scipy.linalg`.
.. versionadded:: 0.16.0
Examples
--------
>>> from scipy.linalg import solve_circulant, solve, circulant, lstsq
>>> c = np.array([2, 2, 4])
>>> b = np.array([1, 2, 3])
>>> solve_circulant(c, b)
array([ 0.75, -0.25, 0.25])
Compare that result to solving the system with `scipy.linalg.solve`:
>>> solve(circulant(c), b)
array([ 0.75, -0.25, 0.25])
A singular example:
>>> c = np.array([1, 1, 0, 0])
>>> b = np.array([1, 2, 3, 4])
Calling ``solve_circulant(c, b)`` will raise a `LinAlgError`. For the
least square solution, use the option ``singular='lstsq'``:
>>> solve_circulant(c, b, singular='lstsq')
array([ 0.25, 1.25, 2.25, 1.25])
Compare to `scipy.linalg.lstsq`:
>>> x, resid, rnk, s = lstsq(circulant(c), b)
>>> x
array([ 0.25, 1.25, 2.25, 1.25])
A broadcasting example:
Suppose we have the vectors of two circulant matrices stored in an array
with shape (2, 5), and three `b` vectors stored in an array with shape
(3, 5). For example,
>>> c = np.array([[1.5, 2, 3, 0, 0], [1, 1, 4, 3, 2]])
>>> b = np.arange(15).reshape(-1, 5)
We want to solve all combinations of circulant matrices and `b` vectors,
with the result stored in an array with shape (2, 3, 5). When we
disregard the axes of `c` and `b` that hold the vectors of coefficients,
the shapes of the collections are (2,) and (3,), respectively, which are
not compatible for broadcasting. To have a broadcast result with shape
(2, 3), we add a trivial dimension to `c`: ``c[:, np.newaxis, :]`` has
shape (2, 1, 5). The last dimension holds the coefficients of the
circulant matrices, so when we call `solve_circulant`, we can use the
default ``caxis=-1``. The coefficients of the `b` vectors are in the last
dimension of the array `b`, so we use ``baxis=-1``. If we use the
default `outaxis`, the result will have shape (5, 2, 3), so we'll use
``outaxis=-1`` to put the solution vectors in the last dimension.
>>> x = solve_circulant(c[:, np.newaxis, :], b, baxis=-1, outaxis=-1)
>>> x.shape
(2, 3, 5)
>>> np.set_printoptions(precision=3) # For compact output of numbers.
>>> x
array([[[-0.118, 0.22 , 1.277, -0.142, 0.302],
[ 0.651, 0.989, 2.046, 0.627, 1.072],
[ 1.42 , 1.758, 2.816, 1.396, 1.841]],
[[ 0.401, 0.304, 0.694, -0.867, 0.377],
[ 0.856, 0.758, 1.149, -0.412, 0.831],
[ 1.31 , 1.213, 1.603, 0.042, 1.286]]])
Check by solving one pair of `c` and `b` vectors (cf. ``x[1, 1, :]``):
>>> solve_circulant(c[1], b[1, :])
array([ 0.856, 0.758, 1.149, -0.412, 0.831])
"""
c = np.atleast_1d(c)
nc = _get_axis_len("c", c, caxis)
b = np.atleast_1d(b)
nb = _get_axis_len("b", b, baxis)
if nc != nb:
raise ValueError('Incompatible c and b axis lengths')
fc = np.fft.fft(np.rollaxis(c, caxis, c.ndim), axis=-1)
abs_fc = np.abs(fc)
if tol is None:
# This is the same tolerance as used in np.linalg.matrix_rank.
tol = abs_fc.max(axis=-1) * nc * np.finfo(np.float64).eps
if tol.shape != ():
tol.shape = tol.shape + (1,)
else:
tol = np.atleast_1d(tol)
near_zeros = abs_fc <= tol
is_near_singular = np.any(near_zeros)
if is_near_singular:
if singular == 'raise':
raise LinAlgError("near singular circulant matrix.")
else:
# Replace the small values with 1 to avoid errors in the
# division fb/fc below.
fc[near_zeros] = 1
fb = np.fft.fft(np.rollaxis(b, baxis, b.ndim), axis=-1)
q = fb / fc
if is_near_singular:
# `near_zeros` is a boolean array, same shape as `c`, that is
# True where `fc` is (near) zero. `q` is the broadcasted result
# of fb / fc, so to set the values of `q` to 0 where `fc` is near
# zero, we use a mask that is the broadcast result of an array
# of True values shaped like `b` with `near_zeros`.
mask = np.ones_like(b, dtype=bool) & near_zeros
q[mask] = 0
x = np.fft.ifft(q, axis=-1)
if not (np.iscomplexobj(c) or np.iscomplexobj(b)):
x = x.real
if outaxis != -1:
x = np.rollaxis(x, -1, outaxis)
return x
# matrix inversion
def inv(a, overwrite_a=False, check_finite=True):
"""
Compute the inverse of a matrix.
Parameters
----------
a : array_like
Square matrix to be inverted.
overwrite_a : bool, optional
Discard data in `a` (may improve performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
ainv : ndarray
Inverse of the matrix `a`.
Raises
------
LinAlgError :
If `a` is singular.
ValueError :
If `a` is not square, or not 2-dimensional.
Examples
--------
>>> from scipy import linalg
>>> a = np.array([[1., 2.], [3., 4.]])
>>> linalg.inv(a)
array([[-2. , 1. ],
[ 1.5, -0.5]])
>>> np.dot(a, linalg.inv(a))
array([[ 1., 0.],
[ 0., 1.]])
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
#XXX: I found no advantage or disadvantage of using finv.
## finv, = get_flinalg_funcs(('inv',),(a1,))
## if finv is not None:
## a_inv,info = finv(a1,overwrite_a=overwrite_a)
## if info==0:
## return a_inv
## if info>0: raise LinAlgError, "singular matrix"
## if info<0: raise ValueError,\
## 'illegal value in %d-th argument of internal inv.getrf|getri'%(-info)
getrf, getri, getri_lwork = get_lapack_funcs(('getrf', 'getri',
'getri_lwork'),
(a1,))
lu, piv, info = getrf(a1, overwrite_a=overwrite_a)
if info == 0:
lwork = _compute_lwork(getri_lwork, a1.shape[0])
# XXX: the following line fixes curious SEGFAULT when
# benchmarking 500x500 matrix inverse. This seems to
# be a bug in LAPACK ?getri routine because if lwork is
# minimal (when using lwork[0] instead of lwork[1]) then
# all tests pass. Further investigation is required if
# more such SEGFAULTs occur.
lwork = int(1.01 * lwork)
inv_a, info = getri(lu, piv, lwork=lwork, overwrite_lu=1)
if info > 0:
raise LinAlgError("singular matrix")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'getrf|getri' % -info)
return inv_a
### Determinant
def det(a, overwrite_a=False, check_finite=True):
"""
Compute the determinant of a matrix
The determinant of a square matrix is a value derived arithmetically
from the coefficients of the matrix.
The determinant for a 3x3 matrix, for example, is computed as follows::
a b c
d e f = A
g h i
det(A) = a*e*i + b*f*g + c*d*h - c*e*g - b*d*i - a*f*h
Parameters
----------
a : (M, M) array_like
A square matrix.
overwrite_a : bool, optional
Allow overwriting data in a (may enhance performance).
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
det : float or complex
Determinant of `a`.
Notes
-----
The determinant is computed via LU factorization, LAPACK routine z/dgetrf.
Examples
--------
>>> from scipy import linalg
>>> a = np.array([[1,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
0.0
>>> a = np.array([[0,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
3.0
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
fdet, = get_flinalg_funcs(('det',), (a1,))
a_det, info = fdet(a1, overwrite_a=overwrite_a)
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'det.getrf' % -info)
return a_det
### Linear Least Squares
class LstsqLapackError(LinAlgError):
pass
def lstsq(a, b, cond=None, overwrite_a=False, overwrite_b=False,
check_finite=True, lapack_driver=None):
"""
Compute least-squares solution to equation Ax = b.
Compute a vector x such that the 2-norm ``|b - A x|`` is minimized.
Parameters
----------
a : (M, N) array_like
Left hand side matrix (2-D array).
b : (M,) or (M, K) array_like
Right hand side matrix or vector (1-D or 2-D array).
cond : float, optional
Cutoff for 'small' singular values; used to determine effective
rank of a. Singular values smaller than
``rcond * largest_singular_value`` are considered zero.
overwrite_a : bool, optional
Discard data in `a` (may enhance performance). Default is False.
overwrite_b : bool, optional
Discard data in `b` (may enhance performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
    lapack_driver : str, optional
Which LAPACK driver is used to solve the least-squares problem.
Options are ``'gelsd'``, ``'gelsy'``, ``'gelss'``. Default
(``'gelsd'``) is a good choice. However, ``'gelsy'`` can be slightly
faster on many problems. ``'gelss'`` was used historically. It is
generally slow but uses less memory.
.. versionadded:: 0.17.0
Returns
-------
x : (N,) or (N, K) ndarray
Least-squares solution. Return shape matches shape of `b`.
residues : () or (1,) or (K,) ndarray
Sums of residues, squared 2-norm for each column in ``b - a x``.
If rank of matrix a is ``< N`` or ``> M``, or ``'gelsy'`` is used,
this is an empty array. If b was 1-D, this is an (1,) shape array,
otherwise the shape is (K,).
rank : int
Effective rank of matrix `a`.
s : (min(M,N),) ndarray or None
Singular values of `a`. The condition number of a is
``abs(s[0] / s[-1])``. None is returned when ``'gelsy'`` is used.
Raises
------
LinAlgError :
If computation does not converge.
ValueError :
When parameters are wrong.
See Also
--------
optimize.nnls : linear least squares with non-negativity constraint
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2:
raise ValueError('expected matrix')
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
if m != b1.shape[0]:
raise ValueError('incompatible dimensions')
driver = lapack_driver
if driver is None:
driver = lstsq.default_lapack_driver
if driver not in ('gelsd', 'gelsy', 'gelss'):
raise ValueError('LAPACK driver "%s" is not found' % driver)
lapack_func, lapack_lwork = get_lapack_funcs((driver,
'%s_lwork' % driver), (a1, b1))
real_data = True if (lapack_func.dtype.kind == 'f') else False
if m < n:
# need to extend b matrix as it will be filled with
# a larger solution matrix
if len(b1.shape) == 2:
b2 = np.zeros((n, nrhs), dtype=lapack_func.dtype)
b2[:m, :] = b1
else:
b2 = np.zeros(n, dtype=lapack_func.dtype)
b2[:m] = b1
b1 = b2
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if cond is None:
cond = np.finfo(lapack_func.dtype).eps
if driver in ('gelss', 'gelsd'):
if driver == 'gelss':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
v, x, s, rank, work, info = lapack_func(a1, b1, cond, lwork,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
elif driver == 'gelsd':
if real_data:
lwork, iwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
if iwork == 0:
# this is LAPACK bug 0038: dgelsd does not provide the
# size of the iwork array in query mode. This bug was
# fixed in LAPACK 3.2.2, released July 21, 2010.
mesg = ("internal gelsd driver lwork query error, "
"required iwork dimension not returned. "
"This is likely the result of LAPACK bug "
"0038, fixed in LAPACK 3.2.2 (released "
"July 21, 2010). ")
if lapack_driver is None:
# restart with gelss
lstsq.default_lapack_driver = 'gelss'
mesg += "Falling back to 'gelss' driver."
warnings.warn(mesg, RuntimeWarning)
return lstsq(a, b, cond, overwrite_a, overwrite_b,
check_finite, lapack_driver='gelss')
# can't proceed, bail out
mesg += ("Use a different lapack_driver when calling lstsq "
"or upgrade LAPACK.")
raise LstsqLapackError(mesg)
x, s, rank, info = lapack_func(a1, b1, lwork,
iwork, cond, False, False)
else: # complex data
lwork, rwork, iwork = _compute_lwork(lapack_lwork, m, n,
nrhs, cond)
x, s, rank, info = lapack_func(a1, b1, lwork, rwork, iwork,
cond, False, False)
if info > 0:
raise LinAlgError("SVD did not converge in Linear Least Squares")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal %s'
% (-info, lapack_driver))
resids = np.asarray([], dtype=x.dtype)
if m > n:
x1 = x[:n]
if rank == n:
resids = np.sum(np.abs(x[n:])**2, axis=0)
x = x1
return x, resids, rank, s
elif driver == 'gelsy':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
jptv = np.zeros((a1.shape[1],1), dtype=np.int32)
v, x, j, rank, info = lapack_func(a1, b1, jptv, cond,
lwork, False, False)
if info < 0:
raise ValueError("illegal value in %d-th argument of internal "
"gelsy" % -info)
if m > n:
x1 = x[:n]
x = x1
return x, np.array([], x.dtype), rank, None
lstsq.default_lapack_driver = 'gelsd'
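# A minimal usage sketch for lstsq (illustrative values): fit y = m*x + c
# through two points, which a full-rank square system reproduces exactly:
#
#     >>> import numpy as np
#     >>> A = np.array([[0., 1.], [1., 1.]])
#     >>> y = np.array([1., 3.])
#     >>> x, res, rank, s = lstsq(A, y)
#     >>> x
#     array([ 2.,  1.])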
def pinv(a, cond=None, rcond=None, return_rank=False, check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a matrix.
Calculate a generalized inverse of a matrix using a least-squares
solver.
Parameters
----------
a : (M, N) array_like
Matrix to be pseudo-inverted.
cond, rcond : float, optional
Cutoff for 'small' singular values in the least-squares solver.
Singular values smaller than ``rcond * largest_singular_value``
are considered zero.
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, M) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If computation does not converge.
Examples
--------
>>> from scipy import linalg
>>> a = np.random.randn(9, 6)
>>> B = linalg.pinv(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
b = np.identity(a.shape[0], dtype=a.dtype)
if rcond is not None:
cond = rcond
x, resids, rank, s = lstsq(a, b, cond=cond, check_finite=False)
if return_rank:
return x, rank
else:
return x
def pinv2(a, cond=None, rcond=None, return_rank=False, check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a matrix.
Calculate a generalized inverse of a matrix using its
singular-value decomposition and including all 'large' singular
values.
Parameters
----------
a : (M, N) array_like
Matrix to be pseudo-inverted.
cond, rcond : float or None
Cutoff for 'small' singular values.
Singular values smaller than ``rcond*largest_singular_value``
are considered zero.
If None or -1, suitable machine precision is used.
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, M) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If SVD computation does not converge.
Examples
--------
>>> from scipy import linalg
>>> a = np.random.randn(9, 6)
>>> B = linalg.pinv2(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
u, s, vh = decomp_svd.svd(a, full_matrices=False, check_finite=False)
if rcond is not None:
cond = rcond
if cond in [None, -1]:
t = u.dtype.char.lower()
factor = {'f': 1E3, 'd': 1E6}
cond = factor[t] * np.finfo(t).eps
rank = np.sum(s > cond * np.max(s))
u = u[:, :rank]
u /= s[:rank]
B = np.transpose(np.conjugate(np.dot(u, vh[:rank])))
if return_rank:
return B, rank
else:
return B
def pinvh(a, cond=None, rcond=None, lower=True, return_rank=False,
check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a Hermitian matrix.
Calculate a generalized inverse of a Hermitian or real symmetric matrix
using its eigenvalue decomposition and including all eigenvalues with
'large' absolute value.
Parameters
----------
a : (N, N) array_like
        Real symmetric or complex Hermitian matrix to be pseudo-inverted
cond, rcond : float or None
Cutoff for 'small' eigenvalues.
Singular values smaller than rcond * largest_eigenvalue are considered
zero.
If None or -1, suitable machine precision is used.
lower : bool, optional
Whether the pertinent array data is taken from the lower or upper
triangle of a. (Default: lower)
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, N) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If eigenvalue does not converge
Examples
--------
>>> from scipy.linalg import pinvh
>>> a = np.random.randn(9, 6)
>>> a = np.dot(a, a.T)
>>> B = pinvh(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
s, u = decomp.eigh(a, lower=lower, check_finite=False)
if rcond is not None:
cond = rcond
if cond in [None, -1]:
t = u.dtype.char.lower()
factor = {'f': 1E3, 'd': 1E6}
cond = factor[t] * np.finfo(t).eps
# For Hermitian matrices, singular values equal abs(eigenvalues)
above_cutoff = (abs(s) > cond * np.max(abs(s)))
psigma_diag = 1.0 / s[above_cutoff]
u = u[:, above_cutoff]
B = np.dot(u * psigma_diag, np.conjugate(u).T)
if return_rank:
return B, len(psigma_diag)
else:
return B
| apache-2.0 | -7,642,605,037,057,237,000 | 33.897959 | 83 | 0.569794 | false |
askulkarni2/ansible-modules-core | cloud/amazon/ec2_scaling_policy.py | 37 | 6191 | #!/usr/bin/python
DOCUMENTATION = """
module: ec2_scaling_policy
short_description: Create or delete AWS scaling policies for Autoscaling groups
description:
- Can create or delete scaling policies for autoscaling groups
- Referenced autoscaling groups must already exist
version_added: "1.6"
author: "Zacharie Eakin (@zeekin)"
options:
state:
description:
- register or deregister the policy
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for the scaling policy
required: true
asg_name:
description:
- Name of the associated autoscaling group
required: true
adjustment_type:
description:
- The type of change in capacity of the autoscaling group
required: false
choices: ['ChangeInCapacity','ExactCapacity','PercentChangeInCapacity']
scaling_adjustment:
description:
- The amount by which the autoscaling group is adjusted by the policy
required: false
min_adjustment_step:
description:
- Minimum amount of adjustment when policy is triggered
required: false
cooldown:
description:
- The minimum period of time between which autoscaling actions can take place
required: false
extends_documentation_fragment: aws
"""
EXAMPLES = '''
- ec2_scaling_policy:
state: present
region: US-XXX
name: "scaledown-policy"
adjustment_type: "ChangeInCapacity"
asg_name: "slave-pool"
scaling_adjustment: -1
min_adjustment_step: 1
cooldown: 300
'''
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
import boto.ec2.autoscale
from boto.ec2.autoscale import ScalingPolicy
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def create_scaling_policy(connection, module):
sp_name = module.params.get('name')
adjustment_type = module.params.get('adjustment_type')
asg_name = module.params.get('asg_name')
scaling_adjustment = module.params.get('scaling_adjustment')
min_adjustment_step = module.params.get('min_adjustment_step')
cooldown = module.params.get('cooldown')
scalingPolicies = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])
if not scalingPolicies:
sp = ScalingPolicy(
name=sp_name,
adjustment_type=adjustment_type,
as_name=asg_name,
scaling_adjustment=scaling_adjustment,
min_adjustment_step=min_adjustment_step,
cooldown=cooldown)
try:
connection.create_scaling_policy(sp)
policy = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])[0]
module.exit_json(changed=True, name=policy.name, arn=policy.policy_arn, as_name=policy.as_name, scaling_adjustment=policy.scaling_adjustment, cooldown=policy.cooldown, adjustment_type=policy.adjustment_type, min_adjustment_step=policy.min_adjustment_step)
except BotoServerError, e:
module.fail_json(msg=str(e))
else:
policy = scalingPolicies[0]
changed = False
# min_adjustment_step attribute is only relevant if the adjustment_type
# is set to percentage change in capacity, so it is a special case
if getattr(policy, 'adjustment_type') == 'PercentChangeInCapacity':
if getattr(policy, 'min_adjustment_step') != module.params.get('min_adjustment_step'):
changed = True
# set the min adjustment step incase the user decided to change their
# adjustment type to percentage
setattr(policy, 'min_adjustment_step', module.params.get('min_adjustment_step'))
# check the remaining attributes
for attr in ('adjustment_type','scaling_adjustment','cooldown'):
if getattr(policy, attr) != module.params.get(attr):
changed = True
setattr(policy, attr, module.params.get(attr))
try:
if changed:
connection.create_scaling_policy(policy)
policy = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])[0]
module.exit_json(changed=changed, name=policy.name, arn=policy.policy_arn, as_name=policy.as_name, scaling_adjustment=policy.scaling_adjustment, cooldown=policy.cooldown, adjustment_type=policy.adjustment_type, min_adjustment_step=policy.min_adjustment_step)
except BotoServerError, e:
module.fail_json(msg=str(e))
def delete_scaling_policy(connection, module):
sp_name = module.params.get('name')
asg_name = module.params.get('asg_name')
scalingPolicies = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])
if scalingPolicies:
try:
connection.delete_policy(sp_name, asg_name)
module.exit_json(changed=True)
except BotoServerError, e:
module.exit_json(changed=False, msg=str(e))
else:
module.exit_json(changed=False)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name = dict(required=True, type='str'),
adjustment_type = dict(type='str', choices=['ChangeInCapacity','ExactCapacity','PercentChangeInCapacity']),
asg_name = dict(required=True, type='str'),
scaling_adjustment = dict(type='int'),
min_adjustment_step = dict(type='int'),
cooldown = dict(type='int'),
state=dict(default='present', choices=['present', 'absent']),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
state = module.params.get('state')
try:
connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg = str(e))
if state == 'present':
create_scaling_policy(connection, module)
elif state == 'absent':
delete_scaling_policy(connection, module)
main()
| gpl-3.0 | 6,494,474,825,796,692,000 | 34.58046 | 270 | 0.668551 | false |
jontrulson/upm | examples/python/ms5803.py | 7 | 2129 | #!/usr/bin/python
# Author: Jon Trulson <[email protected]>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_ms5803 as sensorObj
def main():
# Instantiate a MS5803 instance using bus 0 and default i2c address
sensor = sensorObj.MS5803(0)
# For SPI, bus 0, you would pass -1 as the address, and a valid pin for CS:
# MS5803(0, -1, 10);
## Exit handlers ##
# This function stops python from printing a stacktrace when you
# hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
while (1):
sensor.update()
print("Temperature:", sensor.getTemperature(), "C,", end=' ')
print("Pressure: ", sensor.getPressure(), "mbar")
time.sleep(1)
if __name__ == '__main__':
main()
| mit | 3,736,096,687,156,722,000 | 35.706897 | 79 | 0.711602 | false |
rubyu/anki | aqt/addons.py | 2 | 5205 | # Copyright: Damien Elmes <[email protected]>
# -*- coding: utf-8 -*-
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import sys, os, traceback
from cStringIO import StringIO
from aqt.qt import *
from aqt.utils import showInfo, openFolder, isWin, openLink, \
askUser
from zipfile import ZipFile
import aqt.forms
import aqt
from aqt.downloader import download
# in the future, it would be nice to save the addon id and unzipped file list
# to the config so that we can clear up all files and check for updates
class AddonManager(object):
def __init__(self, mw):
self.mw = mw
f = self.mw.form; s = SIGNAL("triggered()")
self.mw.connect(f.actionOpenPluginFolder, s, self.onOpenAddonFolder)
self.mw.connect(f.actionDownloadSharedPlugin, s, self.onGetAddons)
self._menus = []
if isWin:
self.clearAddonCache()
sys.path.insert(0, self.addonsFolder())
if not self.mw.safeMode:
self.loadAddons()
def files(self):
return [f for f in os.listdir(self.addonsFolder())
if f.endswith(".py")]
def loadAddons(self):
for file in self.files():
try:
__import__(file.replace(".py", ""))
except:
traceback.print_exc()
self.rebuildAddonsMenu()
# Menus
######################################################################
def onOpenAddonFolder(self, path=None):
if path is None:
path = self.addonsFolder()
openFolder(path)
def rebuildAddonsMenu(self):
for m in self._menus:
self.mw.form.menuPlugins.removeAction(m.menuAction())
for file in self.files():
m = self.mw.form.menuPlugins.addMenu(
os.path.splitext(file)[0])
self._menus.append(m)
a = QAction(_("Edit..."), self.mw)
p = os.path.join(self.addonsFolder(), file)
self.mw.connect(a, SIGNAL("triggered()"),
lambda p=p: self.onEdit(p))
m.addAction(a)
a = QAction(_("Delete..."), self.mw)
self.mw.connect(a, SIGNAL("triggered()"),
lambda p=p: self.onRem(p))
m.addAction(a)
def onEdit(self, path):
d = QDialog(self.mw)
frm = aqt.forms.editaddon.Ui_Dialog()
frm.setupUi(d)
d.setWindowTitle(os.path.basename(path))
frm.text.setPlainText(unicode(open(path).read(), "utf8"))
d.connect(frm.buttonBox, SIGNAL("accepted()"),
lambda: self.onAcceptEdit(path, frm))
d.exec_()
def onAcceptEdit(self, path, frm):
open(path, "w").write(frm.text.toPlainText().encode("utf8"))
showInfo(_("Edits saved. Please restart Anki."))
def onRem(self, path):
if not askUser(_("Delete %s?") % os.path.basename(path)):
return
os.unlink(path)
self.rebuildAddonsMenu()
showInfo(_("Deleted. Please restart Anki."))
# Tools
######################################################################
def addonsFolder(self):
dir = self.mw.pm.addonFolder()
if isWin:
dir = dir.encode(sys.getfilesystemencoding())
return dir
def clearAddonCache(self):
"Clear .pyc files which may cause crashes if Python version updated."
dir = self.addonsFolder()
for curdir, dirs, files in os.walk(dir):
for f in files:
if not f.endswith(".pyc"):
continue
os.unlink(os.path.join(curdir, f))
def registerAddon(self, name, updateId):
# not currently used
return
# Installing add-ons
######################################################################
def onGetAddons(self):
GetAddons(self.mw)
def install(self, data, fname):
if fname.endswith(".py"):
# .py files go directly into the addon folder
path = os.path.join(self.addonsFolder(), fname)
open(path, "w").write(data)
return
# .zip file
z = ZipFile(StringIO(data))
base = self.addonsFolder()
for n in z.namelist():
if n.endswith("/"):
# folder; ignore
continue
# write
z.extract(n, base)
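# Hypothetical usage sketch of the install paths above (the add-on name and
# payload are made up):
#   mgr = aqt.mw.addonManager
#   mgr.install(open("myaddon.py").read(), "myaddon.py")       # copied into the addons folder
#   mgr.install(open("myaddon.zip", "rb").read(), "myaddon.zip")  # unzipped into it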
class GetAddons(QDialog):
def __init__(self, mw):
QDialog.__init__(self, mw)
self.mw = mw
self.form = aqt.forms.getaddons.Ui_Dialog()
self.form.setupUi(self)
b = self.form.buttonBox.addButton(
_("Browse"), QDialogButtonBox.ActionRole)
self.connect(b, SIGNAL("clicked()"), self.onBrowse)
self.exec_()
def onBrowse(self):
openLink(aqt.appShared + "addons/")
def accept(self):
QDialog.accept(self)
# create downloader thread
ret = download(self.mw, self.form.code.text())
if not ret:
return
data, fname = ret
self.mw.addonManager.install(data, fname)
self.mw.progress.finish()
showInfo(_("Download successful. Please restart Anki."))
| agpl-3.0 | 5,297,795,852,006,329,000 | 31.943038 | 78 | 0.543324 | false |
carl-mastrangelo/grpc | tools/profiling/microbenchmarks/bm_json.py | 18 | 6459 | # Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Utilities for manipulating JSON data that represents microbenchmark results.
import os
# template arguments and dynamic arguments of individual benchmark types
# Example benchmark name: "BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/0/0"
_BM_SPECS = {
'BM_UnaryPingPong': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'response_size'],
},
'BM_PumpStreamClientToServer': {
'tpl': ['fixture'],
'dyn': ['request_size'],
},
'BM_PumpStreamServerToClient': {
'tpl': ['fixture'],
'dyn': ['request_size'],
},
'BM_StreamingPingPong': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'request_count'],
},
'BM_StreamingPingPongMsgs': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size'],
},
'BM_PumpStreamServerToClient_Trickle': {
'tpl': [],
'dyn': ['request_size', 'bandwidth_kilobits'],
},
'BM_PumpUnbalancedUnary_Trickle': {
'tpl': [],
'dyn': ['cli_req_size', 'svr_req_size', 'bandwidth_kilobits'],
},
'BM_ErrorStringOnNewError': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorStringRepeatedly': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorGetStatus': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorGetStatusCode': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorHttpError': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_HasClearGrpcStatus': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_IsolatedFilter': {
'tpl': ['fixture', 'client_mutator'],
'dyn': [],
},
'BM_HpackEncoderEncodeHeader': {
'tpl': ['fixture'],
'dyn': ['end_of_stream', 'request_size'],
},
'BM_HpackParserParseHeader': {
'tpl': ['fixture', 'on_header'],
'dyn': [],
},
'BM_CallCreateDestroy': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_Zalloc': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_PollEmptyPollset_SpeedOfLight': {
'tpl': [],
'dyn': ['request_size', 'request_count'],
},
'BM_StreamCreateSendInitialMetadataDestroy': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_TransportStreamSend': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_TransportStreamRecv': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_StreamingPingPongWithCoalescingApi': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'request_count', 'end_of_stream'],
},
'BM_Base16SomeStuff': {
'tpl': [],
'dyn': ['request_size'],
}
}
def numericalize(s):
"""Convert abbreviations like '100M' or '10k' to a number."""
if not s: return ''
if s[-1] == 'k':
return float(s[:-1]) * 1024
if s[-1] == 'M':
return float(s[:-1]) * 1024 * 1024
if 0 <= (ord(s[-1]) - ord('0')) <= 9:
return float(s)
    assert False, 'not a number: %s' % s
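# Traced behaviour of numericalize on sample inputs (the values follow
# directly from the branches above):
#   numericalize('10k') -> 10240.0
#   numericalize('1M')  -> 1048576.0
#   numericalize('42')  -> 42.0
#   numericalize('')    -> ''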
def parse_name(name):
cpp_name = name
if '<' not in name and '/' not in name and name not in _BM_SPECS:
return {'name': name, 'cpp_name': name}
rest = name
out = {}
tpl_args = []
dyn_args = []
if '<' in rest:
tpl_bit = rest[rest.find('<') + 1:rest.rfind('>')]
arg = ''
nesting = 0
for c in tpl_bit:
if c == '<':
nesting += 1
arg += c
elif c == '>':
nesting -= 1
arg += c
elif c == ',':
if nesting == 0:
tpl_args.append(arg.strip())
arg = ''
else:
arg += c
else:
arg += c
tpl_args.append(arg.strip())
rest = rest[:rest.find('<')] + rest[rest.rfind('>') + 1:]
if '/' in rest:
s = rest.split('/')
rest = s[0]
dyn_args = s[1:]
name = rest
assert name in _BM_SPECS, '_BM_SPECS needs to be expanded for %s' % name
assert len(dyn_args) == len(_BM_SPECS[name]['dyn'])
assert len(tpl_args) == len(_BM_SPECS[name]['tpl'])
out['name'] = name
out['cpp_name'] = cpp_name
out.update(
dict((k, numericalize(v))
for k, v in zip(_BM_SPECS[name]['dyn'], dyn_args)))
out.update(dict(zip(_BM_SPECS[name]['tpl'], tpl_args)))
return out
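# A hypothetical trace of parse_name on a name of the kind shown at the top
# of this file (the concrete sizes are made up):
#   parse_name('BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/8/128')
#   -> {'name': 'BM_UnaryPingPong',
#       'cpp_name': 'BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/8/128',
#       'fixture': 'TCP', 'client_mutator': 'NoOpMutator',
#       'server_mutator': 'NoOpMutator',
#       'request_size': 8.0, 'response_size': 128.0}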
def expand_json(js, js2=None):
if not js and not js2: raise StopIteration()
if not js: js = js2
for bm in js['benchmarks']:
if bm['name'].endswith('_stddev') or bm['name'].endswith('_mean'):
continue
context = js['context']
if 'label' in bm:
labels_list = [
s.split(':')
for s in bm['label'].strip().split(' ')
if len(s) and s[0] != '#'
]
for el in labels_list:
el[0] = el[0].replace('/iter', '_per_iteration')
labels = dict(labels_list)
else:
labels = {}
row = {
'jenkins_build': os.environ.get('BUILD_NUMBER', ''),
'jenkins_job': os.environ.get('JOB_NAME', ''),
}
row.update(context)
row.update(bm)
row.update(parse_name(row['name']))
row.update(labels)
if js2:
for bm2 in js2['benchmarks']:
if bm['name'] == bm2['name'] and 'already_used' not in bm2:
row['cpu_time'] = bm2['cpu_time']
row['real_time'] = bm2['real_time']
row['iterations'] = bm2['iterations']
bm2['already_used'] = True
break
yield row
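# A hypothetical driver sketch for expand_json (the file name is an
# assumption):
#   import json
#   with open('benchmark_out.json') as f:
#       rows = list(expand_json(json.load(f)))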
| apache-2.0 | 190,914,363,233,680,320 | 29.323944 | 79 | 0.495433 | false |
edx/edx-platform | lms/djangoapps/lti_provider/tests/test_views.py | 5 | 9132 | """
Tests for the LTI provider views
"""
from unittest.mock import MagicMock, patch
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.courseware.testutils import RenderXBlockTestMixin
from lms.djangoapps.lti_provider import models, views
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
LTI_DEFAULT_PARAMS = {
'roles': 'Instructor,urn:lti:instrole:ims/lis/Administrator',
'context_id': 'lti_launch_context_id',
'oauth_version': '1.0',
'oauth_consumer_key': 'consumer_key',
'oauth_signature': 'OAuth Signature',
'oauth_signature_method': 'HMAC-SHA1',
'oauth_timestamp': 'OAuth Timestamp',
'oauth_nonce': 'OAuth Nonce',
'user_id': 'LTI_User',
}
LTI_OPTIONAL_PARAMS = {
'context_title': 'context title',
'context_label': 'context label',
'lis_result_sourcedid': 'result sourcedid',
'lis_outcome_service_url': 'outcome service URL',
'tool_consumer_instance_guid': 'consumer instance guid'
}
COURSE_KEY = CourseLocator(org='some_org', course='some_course', run='some_run')
USAGE_KEY = BlockUsageLocator(course_key=COURSE_KEY, block_type='problem', block_id='block_id')
COURSE_PARAMS = {
'course_key': COURSE_KEY,
'usage_key': USAGE_KEY
}
ALL_PARAMS = dict(list(LTI_DEFAULT_PARAMS.items()) + list(COURSE_PARAMS.items()))
def build_launch_request(extra_post_data=None, param_to_delete=None):
"""
Helper method to create a new request object for the LTI launch.
"""
if extra_post_data is None:
extra_post_data = {}
post_data = dict(list(LTI_DEFAULT_PARAMS.items()) + list(extra_post_data.items()))
if param_to_delete:
del post_data[param_to_delete]
request = RequestFactory().post('/', data=post_data)
request.user = UserFactory.create()
request.session = {}
return request
class LtiTestMixin:
"""
Mixin for LTI tests
"""
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': True})
def setUp(self):
super().setUp()
# Always accept the OAuth signature
self.mock_verify = MagicMock(return_value=True)
patcher = patch('lms.djangoapps.lti_provider.signature_validator.SignatureValidator.verify', self.mock_verify)
patcher.start()
self.addCleanup(patcher.stop)
self.consumer = models.LtiConsumer(
consumer_name='consumer',
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key'],
consumer_secret='secret'
)
self.consumer.save()
class LtiLaunchTest(LtiTestMixin, TestCase):
"""
Tests for the lti_launch view
"""
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_valid_launch(self, _authenticate, render):
"""
Verifies that the LTI launch succeeds when passed a valid request.
"""
request = build_launch_request()
views.lti_launch(request, str(COURSE_KEY), str(USAGE_KEY))
render.assert_called_with(request, USAGE_KEY)
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.store_outcome_parameters')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_valid_launch_with_optional_params(self, _authenticate, store_params, _render):
"""
        Verifies that optional LTI parameters are stored when the launch succeeds.
"""
request = build_launch_request(extra_post_data=LTI_OPTIONAL_PARAMS)
views.lti_launch(request, str(COURSE_KEY), str(USAGE_KEY))
store_params.assert_called_with(
dict(list(ALL_PARAMS.items()) + list(LTI_OPTIONAL_PARAMS.items())),
request.user,
self.consumer
)
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.store_outcome_parameters')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_outcome_service_registered(self, _authenticate, store_params, _render):
"""
        Verifies that outcome service parameters are stored when the launch succeeds.
"""
request = build_launch_request()
views.lti_launch(
request,
str(COURSE_PARAMS['course_key']),
str(COURSE_PARAMS['usage_key'])
)
store_params.assert_called_with(ALL_PARAMS, request.user, self.consumer)
def launch_with_missing_parameter(self, missing_param):
"""
Helper method to remove a parameter from the LTI launch and call the view
"""
request = build_launch_request(param_to_delete=missing_param)
return views.lti_launch(request, None, None)
def test_launch_with_missing_parameters(self):
"""
Runs through all required LTI parameters and verifies that the lti_launch
view returns Bad Request if any of them are missing.
"""
for missing_param in views.REQUIRED_PARAMETERS:
response = self.launch_with_missing_parameter(missing_param)
assert response.status_code == 400, (('Launch should fail when parameter ' + missing_param) + ' is missing')
def test_launch_with_disabled_feature_flag(self):
"""
Verifies that the LTI launch will fail if the ENABLE_LTI_PROVIDER flag
is not set
"""
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': False}):
request = build_launch_request()
response = views.lti_launch(request, None, None)
assert response.status_code == 403
def test_forbidden_if_signature_fails(self):
"""
Verifies that the view returns Forbidden if the LTI OAuth signature is
incorrect.
"""
self.mock_verify.return_value = False
request = build_launch_request()
response = views.lti_launch(request, None, None)
assert response.status_code == 403
@patch('lms.djangoapps.lti_provider.views.render_courseware')
def test_lti_consumer_record_supplemented_with_guid(self, _render):
self.mock_verify.return_value = False
request = build_launch_request(LTI_OPTIONAL_PARAMS)
with self.assertNumQueries(3):
views.lti_launch(request, None, None)
consumer = models.LtiConsumer.objects.get(
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key']
)
assert consumer.instance_guid == 'consumer instance guid'
class LtiLaunchTestRender(LtiTestMixin, RenderXBlockTestMixin, ModuleStoreTestCase):
"""
Tests for the rendering returned by lti_launch view.
This class overrides the get_response method, which is used by
the tests defined in RenderXBlockTestMixin.
"""
def get_response(self, usage_key, url_encoded_params=None):
"""
Overridable method to get the response from the endpoint that is being tested.
"""
lti_launch_url = reverse(
'lti_provider_launch',
kwargs={
'course_id': str(self.course.id),
'usage_id': str(usage_key)
}
)
if url_encoded_params:
lti_launch_url += '?' + url_encoded_params
return self.client.post(lti_launch_url, data=LTI_DEFAULT_PARAMS)
# The following test methods override the base tests for verifying access
# by unenrolled and unauthenticated students, since there is a discrepancy
# of access rules between the 2 endpoints (LTI and xBlock_render).
# TODO fix this access discrepancy to the same underlying data.
def test_unenrolled_student(self):
"""
Override since LTI allows access to unenrolled students.
"""
self.setup_course()
self.setup_user(admin=False, enroll=False, login=True)
self.verify_response()
def test_unauthenticated(self):
"""
Override since LTI allows access to unauthenticated users.
"""
self.setup_course()
self.setup_user(admin=False, enroll=True, login=False)
self.verify_response()
def get_success_enrolled_staff_mongo_count(self):
"""
Override because mongo queries are higher for this
particular test. This has not been investigated exhaustively
as mongo is no longer used much, and removing user_partitions
from inheritance fixes the problem.
# The 9 mongoDB calls include calls for
# Old Mongo:
# (1) fill_in_run
# (2) get_course in get_course_with_access
# (3) get_item for HTML block in get_module_by_usage_id
# (4) get_parent when loading HTML block
# (5)-(8) calls related to the inherited user_partitions field.
# (9) edx_notes descriptor call to get_course
"""
return 9
| agpl-3.0 | 5,933,823,660,744,590,000 | 36.892116 | 120 | 0.656373 | false |
udayinfy/openerp-7.0 | wms/__openerp__.py | 3 | 2789 | # -*- coding: utf-8 -*-
##############################################################################
#
#    wms module for OpenERP. This module allows managing cross-docking in warehouses
# Copyright (C) 2011 SYLEAM Info Services (<http://www.Syleam.fr/>)
# Sylvain Garancher <[email protected]>
# Elico Corp (port to 7.0) <[email protected]>
# This file is a part of wms
#
# wms is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wms is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Warehouse Management System',
'version': '1.2',
'category': 'Generic Modules/Inventory Control',
    'description': """This module provides extensions to the stock module.""",
'author': 'SYLEAM',
'website': 'http://www.syleam.fr/',
'depends': [
'base',
'stock',
],
'init_xml': [],
'images': [],
'update_xml': [
'security/ir.model.access.csv',
#'stock_view.xml',
# 'report_stock_view.xml',
#'wizard/stock_to_date_view.xml',
],
'demo_xml': [],
'test': [
#'test/wms_test01.yml',
#'test/wms_test02.yml',
#'test/wms_test03.yml',
#'test/wms_test04.yml',
#'test/wms_test05.yml',
#'test/wms_test06.yml',
#'test/wms_test07.yml',
#'test/wms_test08.yml',
#'test/wms_test09.yml',
#'test/wms_test10.yml',
#'test/wms_test11.yml',
#'test/wms_test12.yml',
#'test/wms_test13.yml',
#'test/wms_test14.yml',
#'test/wms_test15.yml',
#'test/wms_test16.yml',
#'test/wms_test17.yml',
#'test/wms_test18.yml',
#'test/wms_test19.yml',
#'test/wms_test20.yml',
#'test/wms_test21.yml',
#'test/wms_test22.yml',
#'test/wms_test23.yml',
#'test/wms_test24.yml',
#'test/wms_test25.yml',
#'test/wms_test26.yml',
#'test/wms_test27.yml',
#'test/wms_test28.yml',
#'test/wms_test29.yml',
],
'installable': True,
'active': False,
'license': 'AGPL-3',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 6,473,723,334,495,469,000 | 33.432099 | 84 | 0.554679 | false |
MMaus/mutils | models/threeSeg.py | 1 | 3984 | # -*- coding : utf8 -*-
"""
.. module:: 3seg
:synopsis: Equations and solutions for the three-segment model
.. moduleauthor:: Moritz Maus <[email protected]>
"""
# format: [l, l1, l2, l3, c1, c2]
from pylab import (array, arccos, linspace, pi, vstack, figure, clf, plot, xlabel,
        ylabel, show, savefig, sqrt, xlim, ylim, axis, arange)
def cfunc(x, params):
"""
this function returns the constraint violation of the 3seg leg.
it must be zeroed!
Parameters
----------
x
configuration of the leg: Fy, h1, h2, tau1, tau2
params
parameter of the system: l, l1, l2, l3, c1, c2
Returns
-------
eq : *array* (1x5)
the non-fulfilment of the constraints (subject to root finding)
"""
l, l1, l2, l3, c1, c2 = params
# print l, l1, l2, l3, c1, c2
Fy, h1, h2, tau1, tau2 = array(x).squeeze()
# print Fy, h1, h2, tau1, tau2
if h1 > l1:
print "warning: invalid h1"
h1 = l1
return [5000, ]*5
if h2 > l3:
print "warning: invalid h2"
        h2 = l3
return [5000, ]*5
if h1 + h2 > l2:
print "warning: invalid h1 + h2"
return [5000, ]*5
while h1 + h2 > l2:
h1 = .8 * h1
h2 = .8 * h2
eq1 = Fy * h1 - tau1
eq2 = tau1 - Fy * h1 - Fy * h2 + tau2
eq3 = Fy * h2 - tau2
eq4 = -1. * c1 * (arccos(h1 / l1) + arccos( (h1 + h2) / l2) - .9 * pi) - tau1
eq5 = -1. * c2 * (arccos(h2 / l3) + arccos( (h1 + h2) / l2) - .9 * pi) - tau2
eq6 = sqrt(l1**2 - h1**2) + sqrt(l2**2 - (h1 + h2)**2) + sqrt(l3**2 -
h2**2) - l
# note: eq2 is omitted because of equality to - (eq1 + eq3)!
return array([eq1, eq3, eq4, eq5, eq6])
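# Quick sanity check (illustrative): for a configuration x returned by a root
# finder such as scipy.optimize.fsolve, every component of cfunc(x, params)
# should be close to zero; qfun() in the __main__ block below measures this
# squared residual.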
if __name__ == '__main__':
import scipy.optimize as opt
x0 = array([ 2.64347199e+03, 7.04878037e-02, 1.67474976e-01,
1.86332534e+02, 4.42715408e+02])
# first parameter is L0
params = [.999, .36, .45, .2, 110., 65.]
#IC = array([0., .00001, .00001, .0002, .003])
IC = array([1., .001, .005, 1., 2.])
res0 = opt.fsolve(cfunc, IC, args=params, xtol=1e-10)
def qfun(x, p):
"""
did root finding succeed?
"""
return sum(cfunc(x, p) **2)
all_res = [res0, ]
all_ll = [params[0], ]
all_q = [qfun(res0, params),]
all_params = [params[:], ]
for leglength in linspace(.999, .5, 100):
params[0] = leglength
IC = all_res[-1]
all_res.append(opt.fsolve(cfunc, all_res[-1], args=params, xtol=1e-10))
all_ll.append(leglength)
all_params.append(params[:])
all_q.append(qfun(all_res[-1], all_params[-1]))
print 'll:', leglength
all_res = vstack(all_res)
all_params = vstack(all_params)
figure('force of the leg')
clf()
plot(all_ll, all_res[:,0],'b.-')
xlabel('leg length')
ylabel('Force')
show()
def visualize(config, param):
"""
.. note::
plots the leg on the current axes
    Parameters
----------
config : *array* (1x5)
of cfunc's x parameter type, describing the configuration of the leg
param : *list*
the list of model parameters, according to cfunc's definition
Returns
-------
*None*
"""
figure('anim figure')
clf()
# plot ground
plot([-1,1],[0,0], color='#000044', linewidth=8)
x = [0, -1 * config[1], config[2], 0]
y1 = sqrt(param[1]**2 - config[1]**2)
y2 = sqrt(param[2]**2 - (config[1] + config[2])**2)
y3 = sqrt(param[3]**2 - config[2]**2)
y = [0, y1, y1 + y2, y1 + y2 + y3]
plot(x, y, color='#000000', linewidth=3)
plot(x, y, color='#982112', linewidth=2, linestyle='--')
plot(x[-1], y[-1], 'o', markersize=13, color='#ffea93')
xlim(-1,1)
ylim(-.2,2)
axis('equal')
def viz(until):
for k in arange(until):
visualize(all_res[k,:], all_params[k,:])
savefig('fig_%02i.png' % k)
| gpl-2.0 | 334,950,680,466,379,970 | 23.9 | 81 | 0.522088 | false |
leilihh/novaha | nova/tests/virt/baremetal/db/test_bm_node.py | 29 | 6886 | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Bare-Metal DB testcase for BareMetalNode
"""
from nova import exception
from nova.tests.virt.baremetal.db import base
from nova.tests.virt.baremetal.db import utils
from nova.virt.baremetal import db
class BareMetalNodesTestCase(base.BMDBTestCase):
def _create_nodes(self):
nodes = [
utils.new_bm_node(pm_address='0', service_host="host1",
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='1', service_host="host2",
instance_uuid='A',
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='2', service_host="host2",
memory_mb=1000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='3', service_host="host2",
memory_mb=1000, cpus=2, local_gb=1000),
utils.new_bm_node(pm_address='4', service_host="host2",
memory_mb=2000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='5', service_host="host2",
memory_mb=2000, cpus=2, local_gb=1000),
]
self.ids = []
for n in nodes:
ref = db.bm_node_create(self.context, n)
self.ids.append(ref['id'])
def test_get_all(self):
r = db.bm_node_get_all(self.context)
self.assertEqual(r, [])
self._create_nodes()
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 6)
def test_get(self):
self._create_nodes()
r = db.bm_node_get(self.context, self.ids[0])
self.assertEqual(r['pm_address'], '0')
r = db.bm_node_get(self.context, self.ids[1])
self.assertEqual(r['pm_address'], '1')
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, -1)
def test_get_by_service_host(self):
self._create_nodes()
r = db.bm_node_get_all(self.context, service_host=None)
self.assertEqual(len(r), 6)
r = db.bm_node_get_all(self.context, service_host="host1")
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '0')
r = db.bm_node_get_all(self.context, service_host="host2")
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('1', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
r = db.bm_node_get_all(self.context, service_host="host3")
self.assertEqual(r, [])
def test_get_associated(self):
self._create_nodes()
r = db.bm_node_get_associated(self.context, service_host=None)
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '1')
r = db.bm_node_get_unassociated(self.context, service_host=None)
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('0', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
def test_destroy(self):
self._create_nodes()
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_destroy_with_interfaces(self):
self._create_nodes()
if_a_id = db.bm_interface_create(self.context, self.ids[0],
'aa:aa:aa:aa:aa:aa', None, None)
if_b_id = db.bm_interface_create(self.context, self.ids[0],
'bb:bb:bb:bb:bb:bb', None, None)
if_x_id = db.bm_interface_create(self.context, self.ids[1],
'11:22:33:44:55:66', None, None)
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_a_id)
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_b_id)
# Another node's interface is not affected
if_x = db.bm_interface_get(self.context, if_x_id)
self.assertEqual(self.ids[1], if_x['bm_node_id'])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_find_free(self):
self._create_nodes()
fn = db.bm_node_find_free(self.context, 'host2')
self.assertEqual(fn['pm_address'], '2')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=500, cpus=2, local_gb=100)
self.assertEqual(fn['pm_address'], '3')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=1001, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=2, local_gb=1000)
self.assertEqual(fn['pm_address'], '5')
# check memory_mb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2001, cpus=2, local_gb=1000)
self.assertIsNone(fn)
# check cpus
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=3, local_gb=1000)
self.assertIsNone(fn)
# check local_gb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=2, local_gb=1001)
self.assertIsNone(fn)
| apache-2.0 | 8,871,365,492,239,454,000 | 35.052356 | 78 | 0.55199 | false |
mSenyor/kivy | examples/animation/animate.py | 40 | 1338 | '''
Widget animation
================
This example demonstrates creating and applying a multi-part animation to
a button widget. You should see a button labelled 'plop' that will move with
an animation when clicked.
'''
import kivy
kivy.require('1.0.7')
from kivy.animation import Animation
from kivy.app import App
from kivy.uix.button import Button
class TestApp(App):
def animate(self, instance):
# create an animation object. This object could be stored
# and reused each call or reused across different widgets.
# += is a sequential step, while &= is in parallel
animation = Animation(pos=(100, 100), t='out_bounce')
animation += Animation(pos=(200, 100), t='out_bounce')
animation &= Animation(size=(500, 500))
animation += Animation(size=(100, 50))
# apply the animation on the button, passed in the "instance" argument
# Notice that default 'click' animation (changing the button
# color while the mouse is down) is unchanged.
animation.start(instance)
def build(self):
# create a button, and attach animate() method as a on_press handler
button = Button(size_hint=(None, None), text='plop',
on_press=self.animate)
return button
if __name__ == '__main__':
TestApp().run()
| mit | 5,951,742,404,710,813,000 | 31.634146 | 78 | 0.648729 | false |
99cloud/keystone_register | horizon/management/commands/startdash.py | 16 | 2450 | import glob
from optparse import make_option
import os
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.importlib import import_module
import horizon
class Command(TemplateCommand):
template = os.path.join(horizon.__path__[0], "conf", "dash_template")
option_list = TemplateCommand.option_list + (
make_option('--target',
dest='target',
action='store',
default=None,
help='The directory in which the panel '
'should be created. Defaults to the '
'current directory. The value "auto" '
'may also be used to automatically '
'create the panel inside the specified '
'dashboard module.'),)
help = ("Creates a Django app directory structure for a new dashboard "
"with the given name in the current directory or optionally in "
"the given directory.")
def handle(self, dash_name=None, **options):
if dash_name is None:
raise CommandError("You must provide a dashboard name.")
# Use our default template if one isn't specified.
if not options.get("template", None):
options["template"] = self.template
# We have html templates as well, so make sure those are included.
options["extensions"].extend(["tmpl", "html", "js", "css"])
# Check that the app_name cannot be imported.
try:
import_module(dash_name)
except ImportError:
pass
else:
raise CommandError("%r conflicts with the name of an existing "
"Python module and cannot be used as an app "
"name. Please try another name." % dash_name)
super(Command, self).handle('dash', dash_name, **options)
target = options.pop("target", None)
if not target:
target = os.path.join(os.curdir, dash_name)
# Rename our python template files.
file_names = glob.glob(os.path.join(target, "*.py.tmpl"))
for filename in file_names:
os.rename(filename, filename[:-5])
| apache-2.0 | 317,741,898,856,593,400 | 40.525424 | 79 | 0.546122 | false |
suyashphadtare/sajil-final-frappe | frappe/widgets/form/save.py | 32 | 1739 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.widgets.form.load import run_onload
@frappe.whitelist()
def savedocs():
"""save / submit / update doclist"""
try:
doc = frappe.get_doc(json.loads(frappe.form_dict.doc))
set_local_name(doc)
# action
doc.docstatus = {"Save":0, "Submit": 1, "Update": 1, "Cancel": 2}[frappe.form_dict.action]
try:
doc.save()
except frappe.NameError, e:
doctype, name, original_exception = e if isinstance(e, tuple) else (doc.doctype or "", doc.name or "", None)
frappe.msgprint(frappe._("{0} {1} already exists").format(doctype, name))
raise
# update recent documents
run_onload(doc)
frappe.user.update_recent(doc.doctype, doc.name)
send_updated_docs(doc)
except Exception:
frappe.msgprint(frappe._('Did not save'))
frappe.errprint(frappe.utils.get_traceback())
raise
@frappe.whitelist()
def cancel(doctype=None, name=None):
"""cancel a doclist"""
try:
doc = frappe.get_doc(doctype, name)
doc.cancel()
send_updated_docs(doc)
except Exception:
frappe.errprint(frappe.utils.get_traceback())
frappe.msgprint(frappe._("Did not cancel"))
raise
def send_updated_docs(doc):
from load import get_docinfo
get_docinfo(doc)
d = doc.as_dict()
if hasattr(doc, 'localname'):
d["localname"] = doc.localname
frappe.response.docs.append(d)
def set_local_name(doc):
def _set_local_name(d):
if doc.get('__islocal') or d.get('__islocal'):
d.localname = d.name
d.name = None
_set_local_name(doc)
for child in doc.get_all_children():
_set_local_name(child)
if doc.get("__newname"):
doc.name = doc.get("__newname")
| mit | -8,531,020,398,046,033,000 | 24.573529 | 111 | 0.688327 | false |
andim27/magiccamp | tests/regressiontests/forms/localflavor/ro.py | 30 | 5828 | # -*- coding: utf-8 -*-
# Tests for the contrib/localflavor/ RO form fields.
tests = r"""
>>> from django.contrib.localflavor.ro.forms import *
##ROCIFField ################################################################
>>> f = ROCIFField()
>>> f.clean('21694681')
u'21694681'
>>> f.clean('RO21694681')
u'21694681'
>>> f.clean('21694680')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid CIF']
>>> f.clean('21694680000')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at most 10 characters (it has 11).']
>>> f.clean('0')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at least 2 characters (it has 1).']
>>> f.clean(None)
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
##ROCNPField #################################################################
>>> f = ROCNPField()
>>> f.clean('1981211204489')
u'1981211204489'
>>> f.clean('1981211204487')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid CNP']
>>> f.clean('1981232204489')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid CNP']
>>> f.clean('9981211204489')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid CNP']
>>> f.clean('9981211209')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at least 13 characters (it has 10).']
>>> f.clean('19812112044891')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at most 13 characters (it has 14).']
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
##ROCountyField ##############################################################
>>> f = ROCountyField()
>>> f.clean('CJ')
'CJ'
>>> f.clean('cj')
'CJ'
>>> f.clean('Argeş')
'AG'
>>> f.clean('argeş')
'AG'
>>> f.clean('Arges')
Traceback (most recent call last):
...
ValidationError: [u'Enter a Romanian county code or name.']
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
##ROCountySelect #############################################################
>>> f = ROCountySelect()
>>> f.render('county','CJ')
u'<select name="county">\n<option value="AB">Alba</option>\n<option value="AR">A
rad</option>\n<option value="AG">Arge\u015f</option>\n<option value="BC">Bac\u01
03u</option>\n<option value="BH">Bihor</option>\n<option value="BN">Bistri\u0163
a-N\u0103s\u0103ud</option>\n<option value="BT">Boto\u015fani</option>\n<option
value="BV">Bra\u015fov</option>\n<option value="BR">Br\u0103ila</option>\n<optio
n value="B">Bucure\u015fti</option>\n<option value="BZ">Buz\u0103u</option>\n<op
tion value="CS">Cara\u015f-Severin</option>\n<option value="CL">C\u0103l\u0103ra
\u015fi</option>\n<option value="CJ" selected="selected">Cluj</option>\n<option
value="CT">Constan\u0163a</option>\n<option value="CV">Covasna</option>\n<option
value="DB">D\xe2mbovi\u0163a</option>\n<option value="DJ">Dolj</option>\n<optio
n value="GL">Gala\u0163i</option>\n<option value="GR">Giurgiu</option>\n<option
value="GJ">Gorj</option>\n<option value="HR">Harghita</option>\n<option value="H
D">Hunedoara</option>\n<option value="IL">Ialomi\u0163a</option>\n<option value=
"IS">Ia\u015fi</option>\n<option value="IF">Ilfov</option>\n<option value="MM">M
aramure\u015f</option>\n<option value="MH">Mehedin\u0163i</option>\n<option valu
e="MS">Mure\u015f</option>\n<option value="NT">Neam\u0163</option>\n<option valu
e="OT">Olt</option>\n<option value="PH">Prahova</option>\n<option value="SM">Sat
u Mare</option>\n<option value="SJ">S\u0103laj</option>\n<option value="SB">Sibi
u</option>\n<option value="SV">Suceava</option>\n<option value="TR">Teleorman</o
ption>\n<option value="TM">Timi\u015f</option>\n<option value="TL">Tulcea</optio
n>\n<option value="VS">Vaslui</option>\n<option value="VL">V\xe2lcea</option>\n<
option value="VN">Vrancea</option>\n</select>'
##ROIBANField #################################################################
>>> f = ROIBANField()
>>> f.clean('RO56RZBR0000060003291177')
u'RO56RZBR0000060003291177'
>>> f.clean('RO56RZBR0000060003291176')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IBAN in ROXX-XXXX-XXXX-XXXX-XXXX-XXXX format']
>>> f.clean('RO56-RZBR-0000-0600-0329-1177')
u'RO56RZBR0000060003291177'
>>> f.clean('AT61 1904 3002 3457 3201')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IBAN in ROXX-XXXX-XXXX-XXXX-XXXX-XXXX format']
>>> f.clean('RO56RZBR000006000329117')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at least 24 characters (it has 23).']
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
##ROPhoneNumberField ##########################################################
>>> f = ROPhoneNumberField()
>>> f.clean('0264485936')
u'0264485936'
>>> f.clean('(0264)-485936')
u'0264485936'
>>> f.clean('02644859368')
Traceback (most recent call last):
...
ValidationError: [u'Phone numbers must be in XXXX-XXXXXX format.']
>>> f.clean('026448593')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at least 10 characters (it has 9).']
>>> f.clean(None)
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
##ROPostalCodeField ###########################################################
>>> f = ROPostalCodeField()
>>> f.clean('400473')
u'400473'
>>> f.clean('40047')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at least 6 characters (it has 5).']
>>> f.clean('4004731')
Traceback (most recent call last):
...
ValidationError: [u'Ensure this value has at most 6 characters (it has 7).']
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
"""
| bsd-3-clause | -7,796,059,109,742,182,000 | 32.291429 | 80 | 0.667525 | false |
ClimbsRocks/scikit-learn | examples/cluster/plot_color_quantization.py | 61 | 3444 | # -*- coding: utf-8 -*-
"""
==================================
Color Quantization using K-Means
==================================
Performs a pixel-wise Vector Quantization (VQ) of an image of the summer palace
(China), reducing the number of colors required to show the image from 96,615
unique colors to 64, while preserving the overall appearance quality.
In this example, pixels are represented in a 3D-space and K-means is used to
find 64 color clusters. In the image processing literature, the codebook
obtained from K-means (the cluster centers) is called the color palette. Using
a single byte, up to 256 colors can be addressed, whereas an RGB encoding
requires 3 bytes per pixel. The GIF file format, for example, uses such a
palette.
For comparison, a quantized image using a random codebook (colors picked up
randomly) is also shown.
"""
# Authors: Robert Layton <[email protected]>
# Olivier Grisel <[email protected]>
# Mathieu Blondel <[email protected]>
#
# License: BSD 3 clause
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.metrics import pairwise_distances_argmin
from sklearn.datasets import load_sample_image
from sklearn.utils import shuffle
from time import time
n_colors = 64
# Load the Summer Palace photo
china = load_sample_image("china.jpg")
# Convert to floats instead of the default 8-bit integer coding. Dividing by
# 255 is important so that plt.imshow works well on float data (it needs to
# be in the range [0-1]).
china = np.array(china, dtype=np.float64) / 255
# Load Image and transform to a 2D numpy array.
w, h, d = original_shape = tuple(china.shape)
assert d == 3
image_array = np.reshape(china, (w * h, d))
print("Fitting model on a small sub-sample of the data")
t0 = time()
image_array_sample = shuffle(image_array, random_state=0)[:1000]
kmeans = KMeans(n_clusters=n_colors, random_state=0).fit(image_array_sample)
print("done in %0.3fs." % (time() - t0))
# Get labels for all points
print("Predicting color indices on the full image (k-means)")
t0 = time()
labels = kmeans.predict(image_array)
print("done in %0.3fs." % (time() - t0))
codebook_random = shuffle(image_array, random_state=0)[:n_colors + 1]
print("Predicting color indices on the full image (random)")
t0 = time()
labels_random = pairwise_distances_argmin(codebook_random,
image_array,
axis=0)
print("done in %0.3fs." % (time() - t0))
def recreate_image(codebook, labels, w, h):
"""Recreate the (compressed) image from the code book & labels"""
d = codebook.shape[1]
image = np.zeros((w, h, d))
label_idx = 0
for i in range(w):
for j in range(h):
image[i][j] = codebook[labels[label_idx]]
label_idx += 1
return image
# Display all results, alongside original image
plt.figure(1)
plt.clf()
ax = plt.axes([0, 0, 1, 1])
plt.axis('off')
plt.title('Original image (96,615 colors)')
plt.imshow(china)
plt.figure(2)
plt.clf()
ax = plt.axes([0, 0, 1, 1])
plt.axis('off')
plt.title('Quantized image (64 colors, K-Means)')
plt.imshow(recreate_image(kmeans.cluster_centers_, labels, w, h))
plt.figure(3)
plt.clf()
ax = plt.axes([0, 0, 1, 1])
plt.axis('off')
plt.title('Quantized image (64 colors, Random)')
plt.imshow(recreate_image(codebook_random, labels_random, w, h))
plt.show()
| bsd-3-clause | -3,621,842,209,109,831,000 | 31.8 | 79 | 0.679733 | false |
GinnyN/towerofdimensions-django | build/lib/django/contrib/flatpages/models.py | 410 | 1134 | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class FlatPage(models.Model):
url = models.CharField(_('URL'), max_length=100, db_index=True)
title = models.CharField(_('title'), max_length=200)
content = models.TextField(_('content'), blank=True)
enable_comments = models.BooleanField(_('enable comments'))
template_name = models.CharField(_('template name'), max_length=70, blank=True,
help_text=_("Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'."))
registration_required = models.BooleanField(_('registration required'), help_text=_("If this is checked, only logged-in users will be able to view the page."))
sites = models.ManyToManyField(Site)
class Meta:
db_table = 'django_flatpage'
verbose_name = _('flat page')
verbose_name_plural = _('flat pages')
ordering = ('url',)
def __unicode__(self):
return u"%s -- %s" % (self.url, self.title)
def get_absolute_url(self):
return self.url
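# Usage sketch (illustrative; the field values here are made up):
#   page = FlatPage.objects.create(url='/about/', title='About',
#                                  content='<p>About us</p>',
#                                  enable_comments=False,
#                                  registration_required=False)
#   page.sites.add(Site.objects.get_current())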
| bsd-3-clause | 9,040,081,696,244,614,000 | 42.615385 | 163 | 0.669312 | false |
shogun-toolbox/shogun | examples/undocumented/python/distance_manhattanword.py | 2 | 1105 | #!/usr/bin/env python
import shogun as sg
traindna = '../data/fm_train_dna.dat'
testdna = '../data/fm_test_dna.dat'
parameter_list = [[traindna,testdna,3,0,False],[traindna,testdna,4,0,False]]
def distance_manhattanword (train_fname=traindna,test_fname=testdna,order=3,gap=0,reverse=False):
charfeat=sg.create_string_features(sg.read_csv(train_fname), sg.DNA)
feats_train=sg.create_string_features(charfeat, order-1, order, gap, reverse)
preproc = sg.create_transformer("SortWordString")
preproc.fit(feats_train)
feats_train = preproc.transform(feats_train)
charfeat=sg.create_string_features(sg.read_csv(test_fname), sg.DNA)
feats_test=sg.create_string_features(charfeat, order-1, order, gap, reverse)
feats_test = preproc.transform(feats_test)
distance = sg.create_distance('ManhattanWordDistance')
distance.init(feats_train, feats_train)
dm_train=distance.get_distance_matrix()
distance.init(feats_train, feats_test)
dm_test=distance.get_distance_matrix()
return dm_train,dm_test
if __name__=='__main__':
print('ManhattanWordDistance')
	distance_manhattanword(*parameter_list[0])
| bsd-3-clause | 4,810,299,774,981,410,000 | 35.833333 | 97 | 0.755656 | false |
cbertinato/pandas | pandas/_config/localization.py | 1 | 4655 | """
Helpers for configuring locale settings.
Name `localization` is chosen to avoid overlap with builtin `locale` module.
"""
from contextlib import contextmanager
import locale
import re
import subprocess
from pandas._config.config import options
@contextmanager
def set_locale(new_locale, lc_var=locale.LC_ALL):
"""
Context manager for temporarily setting a locale.
Parameters
----------
new_locale : str or tuple
A string of the form <language_country>.<encoding>. For example to set
the current locale to US English with a UTF8 encoding, you would pass
"en_US.UTF-8".
lc_var : int, default `locale.LC_ALL`
The category of the locale being set.
Notes
-----
This is useful when you want to run a particular block of code under a
particular locale, without globally setting the locale. This probably isn't
thread-safe.
"""
current_locale = locale.getlocale()
try:
locale.setlocale(lc_var, new_locale)
normalized_locale = locale.getlocale()
if all(x is not None for x in normalized_locale):
yield '.'.join(normalized_locale)
else:
yield new_locale
finally:
locale.setlocale(lc_var, current_locale)
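# Usage sketch (illustrative; 'de_DE.UTF-8' is assumed to be installed, which
# is platform dependent):
#
#   with set_locale('de_DE.UTF-8') as loc:
#       # formatting inside this block uses the German locale
#       formatted = locale.currency(1234.56)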
def can_set_locale(lc, lc_var=locale.LC_ALL):
"""
Check to see if we can set a locale, and subsequently get the locale,
without raising an Exception.
Parameters
----------
lc : str
The locale to attempt to set.
lc_var : int, default `locale.LC_ALL`
The category of the locale being set.
Returns
-------
is_valid : bool
Whether the passed locale can be set
"""
try:
with set_locale(lc, lc_var=lc_var):
pass
except (ValueError, locale.Error):
# horrible name for a Exception subclass
return False
else:
return True
def _valid_locales(locales, normalize):
"""
Return a list of normalized locales that do not throw an ``Exception``
when set.
Parameters
----------
locales : str
A string where each locale is separated by a newline.
normalize : bool
Whether to call ``locale.normalize`` on each locale.
Returns
-------
valid_locales : list
A list of valid locales.
"""
if normalize:
normalizer = lambda x: locale.normalize(x.strip())
else:
normalizer = lambda x: x.strip()
return list(filter(can_set_locale, map(normalizer, locales)))
def _default_locale_getter():
try:
raw_locales = subprocess.check_output(['locale -a'], shell=True)
except subprocess.CalledProcessError as e:
raise type(e)("{exception}, the 'locale -a' command cannot be found "
"on your system".format(exception=e))
return raw_locales
def get_locales(prefix=None, normalize=True,
locale_getter=_default_locale_getter):
"""
Get all the locales that are available on the system.
Parameters
----------
prefix : str
If not ``None`` then return only those locales with the prefix
provided. For example to get all English language locales (those that
start with ``"en"``), pass ``prefix="en"``.
normalize : bool
Call ``locale.normalize`` on the resulting list of available locales.
If ``True``, only locales that can be set without throwing an
``Exception`` are returned.
locale_getter : callable
The function to use to retrieve the current locales. This should return
a string with each locale separated by a newline character.
Returns
-------
locales : list of strings
A list of locale strings that can be set with ``locale.setlocale()``.
For example::
locale.setlocale(locale.LC_ALL, locale_string)
On error will return None (no locale available, e.g. Windows)
"""
try:
raw_locales = locale_getter()
except Exception:
return None
try:
# raw_locales is "\n" separated list of locales
# it may contain non-decodable parts, so split
# extract what we can and then rejoin.
raw_locales = raw_locales.split(b'\n')
out_locales = []
for x in raw_locales:
out_locales.append(str(
x, encoding=options.display.encoding))
except TypeError:
pass
if prefix is None:
return _valid_locales(out_locales, normalize)
pattern = re.compile('{prefix}.*'.format(prefix=prefix))
found = pattern.findall('\n'.join(out_locales))
return _valid_locales(found, normalize)
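# Illustrative call: get_locales(prefix='en') might return something like
# ['en_US.UTF-8', 'en_GB.UTF-8'], depending on the locales installed on the
# host system; on platforms without `locale -a` (e.g. Windows) it returns
# None.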
| bsd-3-clause | -4,127,282,855,604,569,600 | 27.734568 | 79 | 0.625994 | false |
Microsoft/hummingbird | tests/test_sklearn_decomposition.py | 1 | 5758 | """
Tests sklearn matrix decomposition converters
"""
import unittest
import warnings
import sys
from distutils.version import LooseVersion
import numpy as np
import torch
import sklearn
from sklearn.decomposition import FastICA, KernelPCA, PCA, TruncatedSVD
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_digits
import hummingbird.ml
class TestSklearnMatrixDecomposition(unittest.TestCase):
def _fit_model_pca(self, model, precompute=False):
data = load_digits()
X_train, X_test, y_train, y_test = train_test_split(data.data, data.target, test_size=0.2, random_state=42)
X_test = X_test.astype("float32")
if precompute:
# For precompute we use a linear kernel
model.fit(np.dot(X_train, X_train.T))
X_test = np.dot(X_test, X_train.T)
else:
model.fit(X_train)
torch_model = hummingbird.ml.convert(model, "torch")
self.assertTrue(torch_model is not None)
np.testing.assert_allclose(model.transform(X_test), torch_model.transform(X_test), rtol=1e-6, atol=2 * 1e-5)
# PCA n_components none
def test_pca_converter_none(self):
self._fit_model_pca(PCA(n_components=None))
    # PCA n_components two
def test_pca_converter_two(self):
self._fit_model_pca(PCA(n_components=2))
    # PCA n_components mle and whiten true
@unittest.skipIf(
LooseVersion(sklearn.__version__) < LooseVersion("0.23.2"),
reason="With Sklearn version < 0.23.2 returns ValueError: math domain error (https://github.com/scikit-learn/scikit-learn/issues/4441)",
)
def test_pca_converter_mle_whiten(self):
self._fit_model_pca(PCA(n_components="mle", whiten=True))
    # PCA n_components mle and solver full
@unittest.skipIf(
LooseVersion(sklearn.__version__) < LooseVersion("0.23.2"),
reason="With Sklearn version < 0.23.2 returns ValueError: math domain error (https://github.com/scikit-learn/scikit-learn/issues/4441)",
)
def test_pca_converter_mle_full(self):
self._fit_model_pca(PCA(n_components="mle", svd_solver="full"))
    # PCA n_components none and solver arpack
def test_pca_converter_none_arpack(self):
self._fit_model_pca(PCA(n_components=None, svd_solver="arpack"))
    # PCA n_components none and solver randomized
def test_pca_converter_none_randomized(self):
self._fit_model_pca(PCA(n_components=None, svd_solver="randomized"))
# KernelPCA linear kernel
def test_kernel_pca_converter_linear(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="linear"))
# KernelPCA linear kernel with inverse transform
def test_kernel_pca_converter_linear_fit_inverse_transform(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="linear", fit_inverse_transform=True))
# KernelPCA poly kernel
def test_kernel_pca_converter_poly(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="poly", degree=2))
# KernelPCA poly kernel coef0
def test_kernel_pca_converter_poly_coef0(self):
self._fit_model_pca(KernelPCA(n_components=10, kernel="poly", degree=3, coef0=10))
# KernelPCA poly kernel with inverse transform
def test_kernel_pca_converter_poly_fit_inverse_transform(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="poly", degree=3, fit_inverse_transform=True))
    # KernelPCA rbf kernel
def test_kernel_pca_converter_rbf(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="rbf"))
# KernelPCA sigmoid kernel
def test_kernel_pca_converter_sigmoid(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="sigmoid"))
# KernelPCA cosine kernel
def test_kernel_pca_converter_cosine(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="cosine"))
# KernelPCA precomputed kernel
def test_kernel_pca_converter_precomputed(self):
self._fit_model_pca(KernelPCA(n_components=5, kernel="precomputed"), precompute=True)
# TODO: Fails on macos-latest Python 3.8 due to a sklearn bug.
# FastICA converter with n_components none
# def test_fast_ica_converter_none(self):
# self._fit_model_pca(FastICA(n_components=None))
# FastICA converter with n_components 3
def test_fast_ica_converter_3(self):
self._fit_model_pca(FastICA(n_components=3))
# FastICA converter with n_components 3 whiten
def test_fast_ica_converter_3_whiten(self):
self._fit_model_pca(FastICA(n_components=3, whiten=True))
# FastICA converter with n_components 3 deflation algorithm
def test_fast_ica_converter_3_deflation(self):
self._fit_model_pca(FastICA(n_components=3, algorithm="deflation"))
# FastICA converter with n_components 3 fun exp
def test_fast_ica_converter_3_exp(self):
self._fit_model_pca(FastICA(n_components=3, fun="exp"))
# FastICA converter with n_components 3 fun cube
def test_fast_ica_converter_3_cube(self):
self._fit_model_pca(FastICA(n_components=3, fun="cube"))
# FastICA converter with n_components 3 fun custom
def test_fast_ica_converter_3_custom(self):
def my_g(x):
return x ** 3, (3 * x ** 2).mean(axis=-1)
self._fit_model_pca(FastICA(n_components=3, fun=my_g))
# TruncatedSVD converter with n_components 3
def test_truncated_svd_converter_3(self):
self._fit_model_pca(TruncatedSVD(n_components=3))
# TruncatedSVD converter with n_components 3 algorithm arpack
def test_truncated_svd_converter_3_arpack(self):
self._fit_model_pca(TruncatedSVD(n_components=3, algorithm="arpack"))
if __name__ == "__main__":
unittest.main()
| mit | 8,471,067,745,009,065,000 | 38.438356 | 144 | 0.686349 | false |
mkrupcale/ansible | lib/ansible/modules/cloud/amazon/ec2_asg.py | 13 | 38185 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: ec2_asg
short_description: Create or delete AWS Autoscaling Groups
description:
- Can create or delete AWS Autoscaling Groups
- Works with the ec2_lc module to manage Launch Configurations
version_added: "1.6"
author: "Gareth Rushgrove (@garethr)"
options:
state:
description:
- register or deregister the instance
required: false
choices: ['present', 'absent']
default: present
name:
description:
- Unique name for group to be created or deleted
required: true
load_balancers:
description:
- List of ELB names to use for the group
required: false
availability_zones:
description:
- List of availability zone names in which to create the group. Defaults to all the availability zones in the region if vpc_zone_identifier is not set.
required: false
launch_config_name:
description:
- Name of the Launch configuration to use for the group. See the ec2_lc module for managing these.
required: true
min_size:
description:
- Minimum number of instances in group, if unspecified then the current group value will be used.
required: false
max_size:
description:
- Maximum number of instances in group, if unspecified then the current group value will be used.
required: false
placement_group:
description:
- Physical location of your cluster placement group created in Amazon EC2.
required: false
version_added: "2.3"
default: None
desired_capacity:
description:
- Desired number of instances in group, if unspecified then the current group value will be used.
required: false
replace_all_instances:
description:
      - In a rolling fashion, replace all instances that use an old launch configuration with instances from the current launch configuration.
required: false
version_added: "1.8"
default: False
replace_batch_size:
description:
- Number of instances you'd like to replace at a time. Used with replace_all_instances.
required: false
version_added: "1.8"
default: 1
replace_instances:
description:
- List of instance_ids belonging to the named ASG that you would like to terminate and be replaced with instances matching the current launch configuration.
required: false
version_added: "1.8"
default: None
lc_check:
description:
- Check to make sure instances that are being replaced with replace_instances do not already have the current launch_config.
required: false
version_added: "1.8"
default: True
vpc_zone_identifier:
description:
- List of VPC subnets to use
required: false
default: None
tags:
description:
- A list of tags to add to the Auto Scale Group. Optional key is 'propagate_at_launch', which defaults to true.
required: false
default: None
version_added: "1.7"
health_check_period:
description:
- Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
required: false
default: 500 seconds
version_added: "1.7"
health_check_type:
description:
- The service you want the health status from, Amazon EC2 or Elastic Load Balancer.
required: false
default: EC2
version_added: "1.7"
choices: ['EC2', 'ELB']
default_cooldown:
description:
- The number of seconds after a scaling activity completes before another can begin.
required: false
default: 300 seconds
version_added: "2.0"
wait_timeout:
description:
      - How long to wait for instances to become viable when replaced. Used in conjunction with the instance_ids option.
default: 300
version_added: "1.8"
wait_for_instances:
description:
- Wait for the ASG instances to be in a ready state before exiting. If instances are behind an ELB, it will wait until the ELB determines all instances have a lifecycle_state of "InService" and a health_status of "Healthy".
version_added: "1.9"
default: yes
required: False
termination_policies:
description:
- An ordered list of criteria used for selecting instances to be removed from the Auto Scaling group when reducing capacity.
      - For 'Default', when used to create a new autoscaling group, the "Default" value is used. When used to change an existing autoscaling group, the current termination policies are maintained.
required: false
default: Default
choices: ['OldestInstance', 'NewestInstance', 'OldestLaunchConfiguration', 'ClosestToNextInstanceHour', 'Default']
version_added: "2.0"
notification_topic:
description:
- A SNS topic ARN to send auto scaling notifications to.
default: None
required: false
version_added: "2.2"
notification_types:
description:
- A list of auto scaling events to trigger notifications on.
default: ['autoscaling:EC2_INSTANCE_LAUNCH', 'autoscaling:EC2_INSTANCE_LAUNCH_ERROR', 'autoscaling:EC2_INSTANCE_TERMINATE', 'autoscaling:EC2_INSTANCE_TERMINATE_ERROR']
required: false
version_added: "2.2"
suspend_processes:
description:
- A list of scaling processes to suspend.
required: False
default: []
choices: ['Launch', 'Terminate', 'HealthCheck', 'ReplaceUnhealthy', 'AZRebalance', 'AlarmNotification', 'ScheduledActions', 'AddToLoadBalancer']
version_added: "2.3"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = '''
# Basic configuration
- ec2_asg:
name: special
load_balancers: [ 'lb1', 'lb2' ]
availability_zones: [ 'eu-west-1a', 'eu-west-1b' ]
launch_config_name: 'lc-1'
min_size: 1
max_size: 10
desired_capacity: 5
vpc_zone_identifier: [ 'subnet-abcd1234', 'subnet-1a2b3c4d' ]
tags:
- environment: production
propagate_at_launch: no
# Rolling ASG Updates
Below is an example of how to assign a new launch config to an ASG and terminate old instances.
All instances in "myasg" that do not have the launch configuration named "my_new_lc" will be terminated in
a rolling fashion with instances using the current launch configuration, "my_new_lc".
This could also be considered a rolling deploy of a pre-baked AMI.
If this is a newly created group, the instances will not be replaced since all instances
will have the current launch configuration.
- name: create launch config
ec2_lc:
name: my_new_lc
image_id: ami-lkajsf
key_name: mykey
region: us-east-1
security_groups: sg-23423
instance_type: m1.small
assign_public_ip: yes
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_all_instances: yes
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
To only replace a couple of instances instead of all of them, supply a list
to "replace_instances":
- ec2_asg:
name: myasg
launch_config_name: my_new_lc
health_check_period: 60
health_check_type: ELB
replace_instances:
- i-b345231
- i-24c2931
min_size: 5
max_size: 5
desired_capacity: 5
region: us-east-1
'''
import time
import logging as log
import traceback
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
log.getLogger('boto').setLevel(log.CRITICAL)
#log.basicConfig(filename='/tmp/ansible_ec2_asg.log',level=log.DEBUG, format='%(asctime)s: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
try:
import boto.ec2.autoscale
from boto.ec2.autoscale import AutoScaleConnection, AutoScalingGroup, Tag
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
ASG_ATTRIBUTES = ('availability_zones', 'default_cooldown', 'desired_capacity',
'health_check_period', 'health_check_type', 'launch_config_name',
'load_balancers', 'max_size', 'min_size', 'name', 'placement_group',
'termination_policies', 'vpc_zone_identifier')
INSTANCE_ATTRIBUTES = ('instance_id', 'health_status', 'lifecycle_state', 'launch_config_name')
def enforce_required_arguments(module):
''' As many arguments are not required for autoscale group deletion
they cannot be mandatory arguments for the module, so we enforce
them here '''
missing_args = []
for arg in ('min_size', 'max_size', 'launch_config_name'):
if module.params[arg] is None:
missing_args.append(arg)
if missing_args:
module.fail_json(msg="Missing required arguments for autoscaling group create/update: %s" % ",".join(missing_args))
def get_properties(autoscaling_group):
properties = dict((attr, getattr(autoscaling_group, attr)) for attr in ASG_ATTRIBUTES)
# Ugly hack to make this JSON-serializable. We take a list of boto Tag
# objects and replace them with a dict-representation. Needed because the
# tags are included in ansible's return value (which is jsonified)
if 'tags' in properties and isinstance(properties['tags'], list):
serializable_tags = {}
for tag in properties['tags']:
serializable_tags[tag.key] = [tag.value, tag.propagate_at_launch]
properties['tags'] = serializable_tags
properties['healthy_instances'] = 0
properties['in_service_instances'] = 0
properties['unhealthy_instances'] = 0
properties['pending_instances'] = 0
properties['viable_instances'] = 0
properties['terminating_instances'] = 0
instance_facts = {}
if autoscaling_group.instances:
properties['instances'] = [i.instance_id for i in autoscaling_group.instances]
for i in autoscaling_group.instances:
instance_facts[i.instance_id] = {'health_status': i.health_status,
'lifecycle_state': i.lifecycle_state,
'launch_config_name': i.launch_config_name }
if i.health_status == 'Healthy' and i.lifecycle_state == 'InService':
properties['viable_instances'] += 1
if i.health_status == 'Healthy':
properties['healthy_instances'] += 1
else:
properties['unhealthy_instances'] += 1
if i.lifecycle_state == 'InService':
properties['in_service_instances'] += 1
if i.lifecycle_state == 'Terminating':
properties['terminating_instances'] += 1
if i.lifecycle_state == 'Pending':
properties['pending_instances'] += 1
properties['instance_facts'] = instance_facts
properties['load_balancers'] = autoscaling_group.load_balancers
if getattr(autoscaling_group, "tags", None):
properties['tags'] = dict((t.key, t.value) for t in autoscaling_group.tags)
return properties
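# Illustrative shape of the dict returned by get_properties() (all values
# below are made-up examples):
#   {'min_size': 1, 'max_size': 5, 'desired_capacity': 3,
#    'viable_instances': 3, 'pending_instances': 0,
#    'instances': ['i-0abc123'],
#    'instance_facts': {'i-0abc123': {'health_status': 'Healthy',
#                                     'lifecycle_state': 'InService',
#                                     'launch_config_name': 'my-lc'}},
#    ...}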
def elb_dreg(asg_connection, module, group_name, instance_id):
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
as_group = asg_connection.get_all_groups(names=[group_name])[0]
wait_timeout = module.params.get('wait_timeout')
props = get_properties(as_group)
count = 1
if as_group.load_balancers and as_group.health_check_type == 'ELB':
try:
elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
else:
return
for lb in as_group.load_balancers:
elb_connection.deregister_instances(lb, instance_id)
log.debug("De-registering {0} from ELB {1}".format(instance_id, lb))
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
count = 0
for lb in as_group.load_balancers:
lb_instances = elb_connection.describe_instance_health(lb)
for i in lb_instances:
if i.instance_id == instance_id and i.state == "InService":
count += 1
log.debug("{0}: {1}, {2}".format(i.instance_id, i.state, i.description))
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for instance to deregister. {0}".format(time.asctime()))
def elb_healthy(asg_connection, elb_connection, module, group_name):
healthy_instances = set()
as_group = asg_connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
# get healthy, inservice instances from ASG
instances = []
for instance, settings in props['instance_facts'].items():
if settings['lifecycle_state'] == 'InService' and settings['health_status'] == 'Healthy':
instances.append(instance)
log.debug("ASG considers the following instances InService and Healthy: {0}".format(instances))
log.debug("ELB instance status:")
for lb in as_group.load_balancers:
# we catch a race condition that sometimes happens if the instance exists in the ASG
# but has not yet show up in the ELB
try:
lb_instances = elb_connection.describe_instance_health(lb, instances=instances)
except boto.exception.BotoServerError as e:
if e.error_code == 'InvalidInstance':
return None
module.fail_json(msg=str(e))
for i in lb_instances:
if i.state == "InService":
healthy_instances.add(i.instance_id)
log.debug("{0}: {1}".format(i.instance_id, i.state))
return len(healthy_instances)
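# NOTE: elb_healthy() returns None when the ELB briefly lags behind the ASG
# (the InvalidInstance race above); under Python 2, None compares less than
# any integer, so the polling loop in wait_for_elb() simply keeps waiting.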
def wait_for_elb(asg_connection, module, group_name):
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
wait_timeout = module.params.get('wait_timeout')
# if the health_check_type is ELB, we want to query the ELBs directly for instance
# status as to avoid health_check_grace period that is awarded to ASG instances
as_group = asg_connection.get_all_groups(names=[group_name])[0]
if as_group.load_balancers and as_group.health_check_type == 'ELB':
log.debug("Waiting for ELB to consider instances healthy.")
try:
elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
wait_timeout = time.time() + wait_timeout
healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name)
while healthy_instances < as_group.min_size and wait_timeout > time.time():
healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name)
log.debug("ELB thinks {0} instances are healthy.".format(healthy_instances))
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
            module.fail_json(msg="Waited too long for ELB instances to be healthy. %s" % time.asctime())
log.debug("Waiting complete. ELB thinks {0} instances are healthy.".format(healthy_instances))
def suspend_processes(as_group, module):
suspend_processes = set(module.params.get('suspend_processes'))
try:
suspended_processes = set([p.process_name for p in as_group.suspended_processes])
except AttributeError:
# New ASG being created, no suspended_processes defined yet
suspended_processes = set()
if suspend_processes == suspended_processes:
return False
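    # e.g. currently suspended == {'Launch', 'AZRebalance'} with a request
    # for {'Launch'}: resume ['AZRebalance'], then (re)suspend ['Launch']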
resume_processes = list(suspended_processes - suspend_processes)
if resume_processes:
as_group.resume_processes(resume_processes)
if suspend_processes:
as_group.suspend_processes(list(suspend_processes))
return True
def create_autoscaling_group(connection, module):
group_name = module.params.get('name')
load_balancers = module.params['load_balancers']
availability_zones = module.params['availability_zones']
launch_config_name = module.params.get('launch_config_name')
min_size = module.params['min_size']
max_size = module.params['max_size']
placement_group = module.params.get('placement_group')
desired_capacity = module.params.get('desired_capacity')
vpc_zone_identifier = module.params.get('vpc_zone_identifier')
set_tags = module.params.get('tags')
health_check_period = module.params.get('health_check_period')
health_check_type = module.params.get('health_check_type')
default_cooldown = module.params.get('default_cooldown')
wait_for_instances = module.params.get('wait_for_instances')
as_groups = connection.get_all_groups(names=[group_name])
wait_timeout = module.params.get('wait_timeout')
termination_policies = module.params.get('termination_policies')
notification_topic = module.params.get('notification_topic')
notification_types = module.params.get('notification_types')
if not vpc_zone_identifier and not availability_zones:
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
ec2_connection = connect_to_aws(boto.ec2, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
elif vpc_zone_identifier:
vpc_zone_identifier = ','.join(vpc_zone_identifier)
asg_tags = []
for tag in set_tags:
        for k, v in tag.items():
            if k != 'propagate_at_launch':
asg_tags.append(Tag(key=k,
value=v,
propagate_at_launch=bool(tag.get('propagate_at_launch', True)),
resource_id=group_name))
if not as_groups:
if not vpc_zone_identifier and not availability_zones:
availability_zones = module.params['availability_zones'] = [zone.name for zone in ec2_connection.get_all_zones()]
enforce_required_arguments(module)
launch_configs = connection.get_all_launch_configurations(names=[launch_config_name])
if len(launch_configs) == 0:
module.fail_json(msg="No launch config found with name %s" % launch_config_name)
ag = AutoScalingGroup(
group_name=group_name,
load_balancers=load_balancers,
availability_zones=availability_zones,
launch_config=launch_configs[0],
min_size=min_size,
max_size=max_size,
placement_group=placement_group,
desired_capacity=desired_capacity,
vpc_zone_identifier=vpc_zone_identifier,
connection=connection,
tags=asg_tags,
health_check_period=health_check_period,
health_check_type=health_check_type,
default_cooldown=default_cooldown,
termination_policies=termination_policies)
try:
connection.create_auto_scaling_group(ag)
suspend_processes(ag, module)
if wait_for_instances:
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
wait_for_elb(connection, module, group_name)
if notification_topic:
ag.put_notification_configuration(notification_topic, notification_types)
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
changed = True
return(changed, asg_properties)
except BotoServerError as e:
module.fail_json(msg="Failed to create Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e))
else:
as_group = as_groups[0]
changed = False
if suspend_processes(as_group, module):
changed = True
for attr in ASG_ATTRIBUTES:
if module.params.get(attr, None) is not None:
module_attr = module.params.get(attr)
if attr == 'vpc_zone_identifier':
module_attr = ','.join(module_attr)
group_attr = getattr(as_group, attr)
# we do this because AWS and the module may return the same list
# sorted differently
if attr != 'termination_policies':
                    try:
                        module_attr.sort()
                    except (AttributeError, TypeError):
                        pass
                    try:
                        group_attr.sort()
                    except (AttributeError, TypeError):
                        pass
if group_attr != module_attr:
changed = True
setattr(as_group, attr, module_attr)
if len(set_tags) > 0:
have_tags = {}
want_tags = {}
for tag in asg_tags:
want_tags[tag.key] = [tag.value, tag.propagate_at_launch]
dead_tags = []
for tag in as_group.tags:
have_tags[tag.key] = [tag.value, tag.propagate_at_launch]
if tag.key not in want_tags:
changed = True
dead_tags.append(tag)
if dead_tags != []:
connection.delete_tags(dead_tags)
if have_tags != want_tags:
changed = True
connection.create_or_update_tags(asg_tags)
# handle loadbalancers separately because None != []
load_balancers = module.params.get('load_balancers') or []
if load_balancers and as_group.load_balancers != load_balancers:
changed = True
as_group.load_balancers = module.params.get('load_balancers')
if changed:
try:
as_group.update()
except BotoServerError as e:
module.fail_json(msg="Failed to update Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e))
if notification_topic:
try:
as_group.put_notification_configuration(notification_topic, notification_types)
except BotoServerError as e:
module.fail_json(msg="Failed to update Autoscaling Group notifications: %s" % str(e), exception=traceback.format_exc(e))
if wait_for_instances:
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances')
wait_for_elb(connection, module, group_name)
try:
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
except BotoServerError as e:
module.fail_json(msg="Failed to read existing Autoscaling Groups: %s" % str(e), exception=traceback.format_exc(e))
return(changed, asg_properties)
def delete_autoscaling_group(connection, module):
group_name = module.params.get('name')
notification_topic = module.params.get('notification_topic')
    groups = connection.get_all_groups(names=[group_name])
    if groups:
        group = groups[0]
        if notification_topic:
            # use the fetched group object; 'ag' was never defined here
            group.delete_notification_configuration(notification_topic)
group.max_size = 0
group.min_size = 0
group.desired_capacity = 0
group.update()
instances = True
while instances:
tmp_groups = connection.get_all_groups(names=[group_name])
if tmp_groups:
tmp_group = tmp_groups[0]
if not tmp_group.instances:
instances = False
time.sleep(10)
group.delete()
while len(connection.get_all_groups(names=[group_name])):
time.sleep(5)
        changed = True
        return changed
    else:
        changed = False
        return changed
def get_chunks(l, n):
for i in xrange(0, len(l), n):
yield l[i:i+n]
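# e.g. list(get_chunks(['i-1', 'i-2', 'i-3'], 2)) -> [['i-1', 'i-2'], ['i-3']]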
def update_size(group, max_size, min_size, dc):
log.debug("setting ASG sizes")
log.debug("minimum size: {0}, desired_capacity: {1}, max size: {2}".format(min_size, dc, max_size ))
group.max_size = max_size
group.min_size = min_size
group.desired_capacity = dc
group.update()
def replace(connection, module):
batch_size = module.params.get('replace_batch_size')
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
max_size = module.params.get('max_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
lc_check = module.params.get('lc_check')
replace_instances = module.params.get('replace_instances')
as_group = connection.get_all_groups(names=[group_name])[0]
wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances')
props = get_properties(as_group)
instances = props['instances']
if replace_instances:
instances = replace_instances
    # check if min_size/max_size/desired_capacity have been specified and if not use ASG values
if min_size is None:
min_size = as_group.min_size
if max_size is None:
max_size = as_group.max_size
if desired_capacity is None:
desired_capacity = as_group.desired_capacity
# check to see if instances are replaceable if checking launch configs
new_instances, old_instances = get_instances_by_lc(props, lc_check, instances)
num_new_inst_needed = desired_capacity - len(new_instances)
if lc_check:
if num_new_inst_needed == 0 and old_instances:
log.debug("No new instances needed, but old instances are present. Removing old instances")
terminate_batch(connection, module, old_instances, instances, True)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
changed = True
return(changed, props)
# we don't want to spin up extra instances if not necessary
if num_new_inst_needed < batch_size:
log.debug("Overriding batch size to {0}".format(num_new_inst_needed))
batch_size = num_new_inst_needed
if not old_instances:
changed = False
return(changed, props)
# set temporary settings and wait for them to be reached
# This should get overwritten if the number of instances left is less than the batch size.
as_group = connection.get_all_groups(names=[group_name])[0]
update_size(as_group, max_size + batch_size, min_size + batch_size, desired_capacity + batch_size)
wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances')
wait_for_elb(connection, module, group_name)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
instances = props['instances']
if replace_instances:
instances = replace_instances
log.debug("beginning main loop")
for i in get_chunks(instances, batch_size):
# break out of this loop if we have enough new instances
break_early, desired_size, term_instances = terminate_batch(connection, module, i, instances, False)
wait_for_term_inst(connection, module, term_instances)
wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, 'viable_instances')
wait_for_elb(connection, module, group_name)
as_group = connection.get_all_groups(names=[group_name])[0]
if break_early:
log.debug("breaking loop")
break
update_size(as_group, max_size, min_size, desired_capacity)
as_group = connection.get_all_groups(names=[group_name])[0]
asg_properties = get_properties(as_group)
log.debug("Rolling update complete.")
    changed = True
return(changed, asg_properties)
def get_instances_by_lc(props, lc_check, initial_instances):
new_instances = []
old_instances = []
# old instances are those that have the old launch config
if lc_check:
for i in props['instances']:
if props['instance_facts'][i]['launch_config_name'] == props['launch_config_name']:
new_instances.append(i)
else:
old_instances.append(i)
else:
log.debug("Comparing initial instances with current: {0}".format(initial_instances))
for i in props['instances']:
if i not in initial_instances:
new_instances.append(i)
else:
old_instances.append(i)
log.debug("New instances: {0}, {1}".format(len(new_instances), new_instances))
log.debug("Old instances: {0}, {1}".format(len(old_instances), old_instances))
return new_instances, old_instances
def list_purgeable_instances(props, lc_check, replace_instances, initial_instances):
instances_to_terminate = []
    instances = (inst_id for inst_id in replace_instances if inst_id in props['instances'])
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
if lc_check:
for i in instances:
if props['instance_facts'][i]['launch_config_name'] != props['launch_config_name']:
instances_to_terminate.append(i)
else:
for i in instances:
if i in initial_instances:
instances_to_terminate.append(i)
return instances_to_terminate
def terminate_batch(connection, module, replace_instances, initial_instances, leftovers=False):
batch_size = module.params.get('replace_batch_size')
min_size = module.params.get('min_size')
desired_capacity = module.params.get('desired_capacity')
group_name = module.params.get('name')
wait_timeout = int(module.params.get('wait_timeout'))
lc_check = module.params.get('lc_check')
decrement_capacity = False
break_loop = False
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
desired_size = as_group.min_size
new_instances, old_instances = get_instances_by_lc(props, lc_check, initial_instances)
num_new_inst_needed = desired_capacity - len(new_instances)
# check to make sure instances given are actually in the given ASG
# and they have a non-current launch config
instances_to_terminate = list_purgeable_instances(props, lc_check, replace_instances, initial_instances)
log.debug("new instances needed: {0}".format(num_new_inst_needed))
log.debug("new instances: {0}".format(new_instances))
log.debug("old instances: {0}".format(old_instances))
log.debug("batch instances: {0}".format(",".join(instances_to_terminate)))
if num_new_inst_needed == 0:
decrement_capacity = True
if as_group.min_size != min_size:
as_group.min_size = min_size
as_group.update()
log.debug("Updating minimum size back to original of {0}".format(min_size))
        # if there are some leftover old instances, but we are already at
        # capacity with new ones, we don't want to decrement capacity
if leftovers:
decrement_capacity = False
break_loop = True
instances_to_terminate = old_instances
desired_size = min_size
log.debug("No new instances needed")
    if num_new_inst_needed < batch_size and num_new_inst_needed != 0:
instances_to_terminate = instances_to_terminate[:num_new_inst_needed]
decrement_capacity = False
break_loop = False
log.debug("{0} new instances needed".format(num_new_inst_needed))
log.debug("decrementing capacity: {0}".format(decrement_capacity))
for instance_id in instances_to_terminate:
elb_dreg(connection, module, group_name, instance_id)
log.debug("terminating instance: {0}".format(instance_id))
connection.terminate_instance(instance_id, decrement_capacity=decrement_capacity)
    # the caller waits (via wait_for_term_inst) until the instances we just
    # terminated drop out of the Terminating/Unhealthy list
return break_loop, desired_size, instances_to_terminate
def wait_for_term_inst(connection, module, term_instances):
batch_size = module.params.get('replace_batch_size')
wait_timeout = module.params.get('wait_timeout')
group_name = module.params.get('name')
lc_check = module.params.get('lc_check')
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
count = 1
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and count > 0:
log.debug("waiting for instances to terminate")
count = 0
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
instance_facts = props['instance_facts']
        instances = (i for i in instance_facts if i in term_instances)
for i in instances:
lifecycle = instance_facts[i]['lifecycle_state']
health = instance_facts[i]['health_status']
log.debug("Instance {0} has state of {1},{2}".format(i,lifecycle,health ))
if lifecycle == 'Terminating' or health == 'Unhealthy':
count += 1
time.sleep(10)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for old instances to terminate. %s" % time.asctime())
def wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, prop):
# make sure we have the latest stats after that last loop.
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop]))
# now we make sure that we have enough instances in a viable state
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and desired_size > props[prop]:
log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop]))
time.sleep(10)
as_group = connection.get_all_groups(names=[group_name])[0]
props = get_properties(as_group)
if wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "Waited too long for new instances to become viable. %s" % time.asctime())
log.debug("Reached {0}: {1}".format(prop, desired_size))
return props
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True, type='str'),
load_balancers=dict(type='list'),
availability_zones=dict(type='list'),
launch_config_name=dict(type='str'),
min_size=dict(type='int'),
max_size=dict(type='int'),
placement_group=dict(type='str'),
desired_capacity=dict(type='int'),
vpc_zone_identifier=dict(type='list'),
replace_batch_size=dict(type='int', default=1),
replace_all_instances=dict(type='bool', default=False),
replace_instances=dict(type='list', default=[]),
lc_check=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=300),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(type='list', default=[]),
health_check_period=dict(type='int', default=300),
health_check_type=dict(default='EC2', choices=['EC2', 'ELB']),
default_cooldown=dict(type='int', default=300),
wait_for_instances=dict(type='bool', default=True),
termination_policies=dict(type='list', default='Default'),
notification_topic=dict(type='str', default=None),
notification_types=dict(type='list', default=[
'autoscaling:EC2_INSTANCE_LAUNCH',
'autoscaling:EC2_INSTANCE_LAUNCH_ERROR',
'autoscaling:EC2_INSTANCE_TERMINATE',
'autoscaling:EC2_INSTANCE_TERMINATE_ERROR'
]),
suspend_processes=dict(type='list', default=[])
),
)
module = AnsibleModule(
argument_spec=argument_spec,
        mutually_exclusive=[['replace_all_instances', 'replace_instances']]
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
replace_instances = module.params.get('replace_instances')
replace_all_instances = module.params.get('replace_all_instances')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params)
if not connection:
module.fail_json(msg="failed to connect to AWS for the given region: %s" % str(region))
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
changed = create_changed = replace_changed = False
if state == 'present':
        create_changed, asg_properties = create_autoscaling_group(connection, module)
elif state == 'absent':
changed = delete_autoscaling_group(connection, module)
        module.exit_json(changed=changed)
if replace_all_instances or replace_instances:
        replace_changed, asg_properties = replace(connection, module)
if create_changed or replace_changed:
changed = True
    module.exit_json(changed=changed, **asg_properties)
if __name__ == '__main__':
main()
| gpl-3.0 | -2,928,054,172,319,307,300 | 40.236501 | 232 | 0.646065 | false |
bogdanvuk/sydpy | sydpy/types/__init__.py | 1 | 1304 | # This file is part of sydpy.
#
# Copyright (C) 2014-2015 Bogdan Vukobratovic
#
# sydpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# sydpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with sydpy. If not, see
# <http://www.gnu.org/licenses/>.
from ._type_base import conv, convgen, ConversionError
from .bit import bit, bit8, bit16, bit32, bit64, Bit
from .array import Array, array
from .vector import vector, Vector
from .struct import struct, Struct
from .enum import Enum
__all__ = ["conv",
"convgen",
"bit",
"bit8",
"bit16",
"bit32",
"bit64",
"Bit",
"array",
"Array",
"vector",
"Vector",
"struct",
"Struct",
"enum",
"Enum"
]
| lgpl-2.1 | -2,229,752,739,850,204,400 | 30.047619 | 66 | 0.618098 | false |
kubeflow/kfserving | python/kfserving/test/test_v1beta1_transformer_config.py | 1 | 2113 | # Copyright 2020 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
KFServing
Python SDK for KFServing # noqa: E501
The version of the OpenAPI document: v0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kfserving
from kfserving.models.v1beta1_transformer_config import V1beta1TransformerConfig # noqa: E501
from kfserving.rest import ApiException
class TestV1beta1TransformerConfig(unittest.TestCase):
"""V1beta1TransformerConfig unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test V1beta1TransformerConfig
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kfserving.models.v1beta1_transformer_config.V1beta1TransformerConfig() # noqa: E501
if include_optional :
return V1beta1TransformerConfig(
default_image_version = '0',
image = '0'
)
else :
return V1beta1TransformerConfig(
default_image_version = '0',
image = '0',
)
def testV1beta1TransformerConfig(self):
"""Test V1beta1TransformerConfig"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,648,028,866,633,783,300 | 29.623188 | 102 | 0.677709 | false |
zenodo/invenio | invenio/legacy/registry.py | 18 | 2353 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import os
import inspect
from flask import current_app
from flask_registry import RegistryProxy, ImportPathRegistry, \
ModuleAutoDiscoveryRegistry
from invenio.ext.registry import ModuleAutoDiscoverySubRegistry
from invenio.utils.datastructures import LazyDict
legacy_modules = RegistryProxy('legacy', ImportPathRegistry,
initial=['invenio.legacy.*'])
webadmin_proxy = RegistryProxy('legacy.webadmin', \
ModuleAutoDiscoverySubRegistry, 'web.admin',
registry_namespace=legacy_modules)
def _admin_handler_name(name):
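    # e.g. 'invenio.legacy.websearch.web.admin.websearchadmin' (hypothetical
    # module path) -> 'websearch/websearchadmin'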
parts = name.split('.')
return '%s/%s' % (parts[2], parts[5])
webadmin = LazyDict(lambda: dict((_admin_handler_name(module.__name__), module)
for module in webadmin_proxy))
webinterface_proxy = RegistryProxy(
'legacy.webinterface', ModuleAutoDiscoveryRegistry, 'webinterface',
registry_namespace=legacy_modules)
def _webinterface(module):
from invenio.ext.legacy.handler import WebInterfaceDirectory
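    # yield only the WebInterfaceDirectory subclasses defined in this very
    # module; the __module__ check below filters out re-imported classes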
parts = module.__name__.split('.')
for value in dir(module):
webinterface = getattr(module, value)
if inspect.isclass(webinterface) and \
issubclass(webinterface, WebInterfaceDirectory) and \
webinterface.__module__ == module.__name__:
yield webinterface.__name__, webinterface
def _webinterfaces(modules):
for module in modules:
for value in _webinterface(module):
yield value
webinterfaces = LazyDict(lambda: dict(_webinterfaces(webinterface_proxy)))
| gpl-2.0 | 4,499,823,531,437,782,500 | 36.349206 | 79 | 0.708032 | false |
openmb/openblackhole-enigma2 | lib/python/Plugins/SystemPlugins/SoftwareManager/SoftwareTools.py | 47 | 9344 | # -*- coding: iso-8859-1 -*-
from enigma import eConsoleAppContainer
from Components.Console import Console
from Components.About import about
from Components.PackageInfo import PackageInfoHandler
from Components.Language import language
from Components.Sources.List import List
from Components.Ipkg import IpkgComponent
from Components.Network import iNetwork
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_METADIR
from Tools.HardwareInfo import HardwareInfo
from time import time
class SoftwareTools(PackageInfoHandler):
lastDownloadDate = None
NetworkConnectionAvailable = None
list_updating = False
available_updates = 0
available_updatelist = []
available_packetlist = []
installed_packetlist = {}
def __init__(self):
aboutInfo = about.getImageVersionString()
if aboutInfo.startswith("dev-"):
self.ImageVersion = 'Experimental'
else:
self.ImageVersion = 'Stable'
self.language = language.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False, neededTag = 'ALL_TAGS', neededFlag = self.ImageVersion)
self.directory = resolveFilename(SCOPE_METADIR)
self.list = List([])
self.NotifierCallback = None
self.Console = Console()
self.UpdateConsole = Console()
self.cmdList = []
self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src')
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
def statusCallback(self, status, progress):
pass
def startSoftwareTools(self, callback = None):
if callback is not None:
self.NotifierCallback = callback
iNetwork.checkNetworkState(self.checkNetworkCB)
def checkNetworkCB(self,data):
if data is not None:
if data <= 2:
self.NetworkConnectionAvailable = True
self.getUpdates()
else:
self.NetworkConnectionAvailable = False
self.getUpdates()
def getUpdates(self, callback = None):
if self.lastDownloadDate is None:
if self.NetworkConnectionAvailable == True:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
else:
if self.NetworkConnectionAvailable == True:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
if self.list_updating and callback is not None:
self.NotifierCallback = callback
self.startIpkgListAvailable()
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_ERROR:
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback(False)
elif event == IpkgComponent.EVENT_DONE:
if self.list_updating:
self.startIpkgListAvailable()
pass
def startIpkgListAvailable(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list"
self.UpdateConsole.ePopen(cmd, self.IpkgListAvailableCB, callback)
def IpkgListAvailableCB(self, result, retval, extra_args = None):
(callback) = extra_args
if result:
if self.list_updating:
self.available_packetlist = []
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
descr = l > 2 and tokens[2].strip() or ""
self.available_packetlist.append([name, version, descr])
if callback is None:
self.startInstallMetaPackage()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startInstallMetaPackage(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if self.NetworkConnectionAvailable == True:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " install enigma2-meta enigma2-plugins-meta enigma2-skins-meta"
self.UpdateConsole.ePopen(cmd, self.InstallMetaPackageCB, callback)
else:
self.InstallMetaPackageCB(True)
def InstallMetaPackageCB(self, result, retval = None, extra_args = None):
(callback) = extra_args
if result:
self.fillPackagesIndexList()
if callback is None:
self.startIpkgListInstalled()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startIpkgListInstalled(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list_installed"
self.UpdateConsole.ePopen(cmd, self.IpkgListInstalledCB, callback)
def IpkgListInstalledCB(self, result, retval, extra_args = None):
(callback) = extra_args
if result:
self.installed_packetlist = {}
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
self.installed_packetlist[name] = version
for package in self.packagesIndexlist[:]:
if not self.verifyPrerequisites(package[0]["prerequisites"]):
self.packagesIndexlist.remove(package)
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
if attributes.has_key("packagetype"):
if attributes["packagetype"] == "internal":
self.packagesIndexlist.remove(package)
if callback is None:
self.countUpdates()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def countUpdates(self, callback = None):
self.available_updates = 0
self.available_updatelist = []
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
packagename = attributes["packagename"]
for x in self.available_packetlist:
if x[0] == packagename:
if self.installed_packetlist.has_key(packagename):
if self.installed_packetlist[packagename] != x[1]:
self.available_updates +=1
self.available_updatelist.append([packagename])
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
elif self.NotifierCallback is not None:
self.NotifierCallback(True)
self.NotifierCallback = None
def startIpkgUpdate(self, callback = None):
if not self.Console:
self.Console = Console()
cmd = self.ipkg.ipkg + " update"
self.Console.ePopen(cmd, self.IpkgUpdateCB, callback)
def IpkgUpdateCB(self, result, retval, extra_args = None):
(callback) = extra_args
if result:
if self.Console:
if len(self.Console.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
def cleanupSoftwareTools(self):
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback = None
self.ipkg.stop()
if self.Console is not None:
if len(self.Console.appContainers):
for name in self.Console.appContainers.keys():
self.Console.kill(name)
if self.UpdateConsole is not None:
if len(self.UpdateConsole.appContainers):
for name in self.UpdateConsole.appContainers.keys():
self.UpdateConsole.kill(name)
def verifyPrerequisites(self, prerequisites):
if prerequisites.has_key("hardware"):
hardware_found = False
for hardware in prerequisites["hardware"]:
if hardware == HardwareInfo().device_name:
hardware_found = True
if not hardware_found:
return False
return True
iSoftwareTools = SoftwareTools()
| gpl-2.0 | 1,030,085,553,780,374,000 | 32.491039 | 130 | 0.710188 | false |
sassman/ansible-modules-core | cloud/amazon/ec2_snapshot.py | 6 | 5398 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_snapshot
short_description: creates a snapshot from an existing volume
description:
- creates an EC2 snapshot from an existing EBS volume
version_added: "1.5"
options:
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
volume_id:
description:
- volume from which to take the snapshot
required: false
description:
description:
- description to be applied to the snapshot
required: false
instance_id:
description:
- instance that has the required volume to snapshot mounted
required: false
device_name:
description:
- device name of a mounted volume to be snapshotted
required: false
snapshot_tags:
description:
- a hash/dictionary of tags to add to the snapshot
required: false
version_added: "1.6"
wait:
description:
- wait for the snapshot to be ready
choices: ['yes', 'no']
required: false
default: yes
version_added: "1.5.1"
wait_timeout:
description:
- how long before wait gives up, in seconds
- specify 0 to wait forever
required: false
default: 0
version_added: "1.5.1"
author: Will Thames
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Simple snapshot of volume using volume_id
- local_action:
module: ec2_snapshot
volume_id: vol-abcdef12
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume mounted on device_name attached to instance_id
- local_action:
module: ec2_snapshot
instance_id: i-12345678
device_name: /dev/sdb1
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume with tagging
- local_action:
module: ec2_snapshot
instance_id: i-12345678
device_name: /dev/sdb1
snapshot_tags:
frequency: hourly
source: /data
'''
import sys
import time
try:
import boto.ec2
except ImportError:
print "failed=True msg='boto required for this module'"
sys.exit(1)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
volume_id = dict(),
description = dict(),
instance_id = dict(),
device_name = dict(),
wait = dict(type='bool', default='true'),
wait_timeout = dict(default=0),
snapshot_tags = dict(type='dict', default=dict()),
)
)
module = AnsibleModule(argument_spec=argument_spec)
volume_id = module.params.get('volume_id')
description = module.params.get('description')
instance_id = module.params.get('instance_id')
device_name = module.params.get('device_name')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
snapshot_tags = module.params.get('snapshot_tags')
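    # exactly one of volume_id / instance_id may be given (logical XOR check)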
    if (not volume_id and not instance_id) or (volume_id and instance_id):
        module.fail_json(msg='One and only one of volume_id or instance_id must be specified')
    if (instance_id and not device_name) or (device_name and not instance_id):
        module.fail_json(msg='Instance ID and device name must both be specified')
ec2 = ec2_connect(module)
if instance_id:
try:
volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id, 'attachment.device': device_name})
if not volumes:
module.fail_json(msg="Could not find volume with name %s attached to instance %s" % (device_name, instance_id))
volume_id = volumes[0].id
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
try:
snapshot = ec2.create_snapshot(volume_id, description=description)
time_waited = 0
if wait:
snapshot.update()
while snapshot.status != 'completed':
time.sleep(3)
snapshot.update()
time_waited += 3
if wait_timeout and time_waited > wait_timeout:
                    module.fail_json(msg='Timed out while creating snapshot.')
for k, v in snapshot_tags.items():
snapshot.add_tag(k, v)
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
module.exit_json(changed=True, snapshot_id=snapshot.id, volume_id=snapshot.volume_id,
volume_size=snapshot.volume_size, tags=snapshot.tags.copy())
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 | -6,052,397,444,278,806,000 | 31.914634 | 127 | 0.658392 | false |
dcolligan/server | setup.py | 4 | 2617 | # Don't import __future__ packages here; they make setup fail
# First, we try to use setuptools. If it's not available locally,
# we fall back on ez_setup.
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
with open("README.pypi.rst") as readmeFile:
long_description = readmeFile.read()
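# Build install_requires from requirements.txt: blank lines and comments are
# skipped, lines deferring to constraints.txt are ignored, and only the first
# token of each remaining line (the pinned 'name==version' spec) is kept.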
install_requires = []
with open("requirements.txt") as requirementsFile:
for line in requirementsFile:
line = line.strip()
if len(line) == 0:
continue
if line[0] == '#':
continue
if line.find('-c constraints.txt') == -1:
pinnedVersion = line.split()[0]
install_requires.append(pinnedVersion)
dependency_links = []
try:
with open("constraints.txt") as constraintsFile:
for line in constraintsFile:
line = line.strip()
if len(line) == 0:
continue
if line[0] == '#':
continue
dependency_links.append(line)
except EnvironmentError:
print('No constraints file found, proceeding without '
'creating dependency links.')
setup(
name="ga4gh-server",
description="A reference implementation of the GA4GH API",
packages=["ga4gh", "ga4gh.server", "ga4gh.server.datamodel",
"ga4gh.server.templates"],
namespace_packages=["ga4gh"],
zip_safe=False,
url="https://github.com/ga4gh/ga4gh-server",
use_scm_version={"write_to": "ga4gh/server/_version.py"},
entry_points={
'console_scripts': [
'ga4gh_configtest=ga4gh.server.cli.configtest:configtest_main',
'ga4gh_server=ga4gh.server.cli.server:server_main',
'ga4gh_repo=ga4gh.server.cli.repomanager:repo_main',
]
},
long_description=long_description,
install_requires=install_requires,
dependency_links=dependency_links,
license='Apache License 2.0',
include_package_data=True,
author="Global Alliance for Genomics and Health",
author_email="[email protected]",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
keywords=['genomics', 'reference'],
# Use setuptools_scm to set the version number automatically from Git
setup_requires=['setuptools_scm'],
)
| apache-2.0 | -4,213,342,856,843,965,400 | 33.893333 | 75 | 0.635078 | false |
saullocastro/compmech | doc/pyplots/theory/fem/fsdt_donnell_kquad4.py | 3 | 1473 | from matplotlib.pyplot import *
from math import sqrt
m = 1/3.
xs = [+1, +1, -1, -1]
ys = [-1, +1, -1, +1]
figure(figsize=(4, 4))
ax = gca()
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_position(('data', 0))
ax.spines['bottom'].set_position(('data', 0))
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('none')
ax.set_aspect('equal')
ax.set_xlim(-1.4, +1.6)
ax.set_ylim(-1.4, +1.6)
ax.text(1.8, 0., r'$\xi$', transform=ax.transData, va='center')
ax.text(0., 1.8, r'$\eta$', rotation='horizontal', transform=ax.transData,
ha='center')
ax.text(+1.1, +1.1, '$n_1$\n' + r'$(+1, +1)$', ha='center', va='bottom',
fontsize=10)
ax.text(-1.1, +1.1, '$n_2$\n' + r'$(-1, +1)$', ha='center', va='bottom',
fontsize=10)
ax.text(-1.1, -1.1, '$n_3$\n' + r'$(-1, -1)$', ha='center', va='top' ,
fontsize=10)
ax.text(+1.1, -1.1, '$n_4$\n' + r'$(+1, -1)$', ha='center', va='top' ,
fontsize=10)
# radius
ax.annotate('$r_1$', xy=(-1, 0.5), xytext=(-0.5, 0.2),
arrowprops=dict(arrowstyle='->'), va='center', ha='center')
ax.annotate('$r_2$', xy=(+1, 0.5), xytext=(+0.5, 0.2),
arrowprops=dict(arrowstyle='->'), va='center', ha='center')
ax.set_xticks([])
ax.set_yticks([])
#ax.set_xticklabels(['-1', '+1'])
#ax.set_yticklabels(['-1', '+1'])
plot([1, -1, -1, 1, 1], [1, 1, -1, -1, 1], '-k')
plot(xs, ys, 'ok', mfc='k')
tight_layout()
savefig('test.png')
#show()
| bsd-3-clause | -6,313,386,928,571,948,000 | 31.733333 | 74 | 0.545825 | false |
timduru/platform-external-chromium_org | tools/site_compare/site_compare.py | 179 | 6504 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""SiteCompare component to handle bulk scrapes.
Invokes a list of browsers and sends them to a list of URLs,
saving the rendered results to a specified directory, then
performs comparison operations on the resulting bitmaps and
saves the results
"""
# This line is necessary to work around a QEMU bug
import _imaging
import os      # Functions for walking the directory tree
import sys     # sys.exit() is used at the bottom of this file
import types   # Runtime type-checking
import command_line # command-line parsing
import drivers # Functions for driving keyboard/mouse/windows, OS-specific
import operators # Functions that, given two bitmaps as input, produce
# output depending on the performance of an operation
import scrapers # Functions that know how to capture a render from
# particular browsers
import commands.compare2 # compare one page in two versions of same browser
import commands.maskmaker # generate a mask based on repeated scrapes
import commands.measure # measure length of time a page takes to load
import commands.scrape # scrape a URL or series of URLs to a bitmap
# The timeload command is obsolete (too flaky); it may be reinstated
# later but for now it's been superceded by "measure"
# import commands.timeload # measure length of time a page takes to load
def Scrape(browsers, urls, window_size=(1024, 768),
window_pos=(0, 0), timeout=20, save_path=None, **kwargs):
"""Invoke one or more browsers over one or more URLs, scraping renders.
Args:
browsers: browsers to invoke with optional version strings
urls: URLs to visit
window_size: size of the browser window to display
window_pos: location of browser window
timeout: time (in seconds) to wait for page to load
save_path: root of save path, automatically appended with browser and
version
kwargs: miscellaneous keyword args, passed to scraper
Returns:
None
@TODO(jhaas): more parameters, or perhaps an indefinite dictionary
parameter, for things like length of time to wait for timeout, speed
of mouse clicks, etc. Possibly on a per-browser, per-URL, or
per-browser-per-URL basis
"""
if type(browsers) in types.StringTypes: browsers = [browsers]
if save_path is None:
# default save path is "scrapes" off the current root
save_path = os.path.join(os.path.split(__file__)[0], "Scrapes")
for browser in browsers:
# Browsers should be tuples of (browser, version)
if type(browser) in types.StringTypes: browser = (browser, None)
scraper = scrapers.GetScraper(browser)
full_path = os.path.join(save_path, browser[0], scraper.version)
drivers.windowing.PreparePath(full_path)
scraper.Scrape(urls, full_path, window_size, window_pos, timeout, kwargs)
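# Minimal usage sketch (hypothetical browser/version and URL; a matching
# scraper module must be available locally):
#   Scrape([('chrome', '1.0.154.65')], ['http://www.example.com'],
#          window_size=(1024, 768), timeout=30)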
def Compare(base, compare, ops, root_path=None, out_path=None):
"""Compares a series of scrapes using a series of operators.
Args:
base: (browser, version) tuple of version to consider the baseline
compare: (browser, version) tuple of version to compare to
ops: list of operators plus operator arguments
root_path: root of the scrapes
out_path: place to put any output from the operators
Returns:
None
@TODO(jhaas): this method will likely change, to provide a robust and
well-defined way of chaining operators, applying operators conditionally,
and full-featured scripting of the operator chain. There also needs
to be better definition of the output; right now it's to stdout and
a log.txt file, with operator-dependent images saved for error output
"""
if root_path is None:
# default save path is "scrapes" off the current root
root_path = os.path.join(os.path.split(__file__)[0], "Scrapes")
if out_path is None:
out_path = os.path.join(os.path.split(__file__)[0], "Compares")
if type(base) in types.StringTypes: base = (base, None)
if type(compare) in types.StringTypes: compare = (compare, None)
if type(ops) in types.StringTypes: ops = [ops]
base_dir = os.path.join(root_path, base[0])
compare_dir = os.path.join(root_path, compare[0])
if base[1] is None:
# base defaults to earliest capture
base = (base[0], max(os.listdir(base_dir)))
if compare[1] is None:
# compare defaults to latest capture
compare = (compare[0], min(os.listdir(compare_dir)))
out_path = os.path.join(out_path, base[0], base[1], compare[0], compare[1])
drivers.windowing.PreparePath(out_path)
# TODO(jhaas): right now we're just dumping output to a log file
# (and the console), which works as far as it goes but isn't nearly
# robust enough. Change this after deciding exactly what we want to
# change it to.
out_file = open(os.path.join(out_path, "log.txt"), "w")
description_string = ("Comparing %s %s to %s %s" %
(base[0], base[1], compare[0], compare[1]))
out_file.write(description_string)
print description_string
base_dir = os.path.join(base_dir, base[1])
compare_dir = os.path.join(compare_dir, compare[1])
for filename in os.listdir(base_dir):
out_file.write("%s: " % filename)
if not os.path.isfile(os.path.join(compare_dir, filename)):
out_file.write("Does not exist in target directory\n")
print "File %s does not exist in target directory" % filename
continue
base_filename = os.path.join(base_dir, filename)
compare_filename = os.path.join(compare_dir, filename)
for op in ops:
if type(op) in types.StringTypes: op = (op, None)
module = operators.GetOperator(op[0])
ret = module.Compare(base_filename, compare_filename)
if ret is None:
print "%s: OK" % (filename,)
out_file.write("OK\n")
else:
print "%s: %s" % (filename, ret[0])
out_file.write("%s\n" % (ret[0]))
ret[1].save(os.path.join(out_path, filename))
out_file.close()
def main():
"""Main executable. Parse the command line and invoke the command."""
cmdline = command_line.CommandLine()
# The below two commands are currently unstable so have been disabled
# commands.compare2.CreateCommand(cmdline)
# commands.maskmaker.CreateCommand(cmdline)
commands.measure.CreateCommand(cmdline)
commands.scrape.CreateCommand(cmdline)
cmdline.ParseCommandLine()
return 0
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause | 7,044,068,989,490,660,000 | 35.954545 | 80 | 0.699569 | false |
SummerLW/Perf-Insight-Report | third_party/gsutil/third_party/apitools/apitools/base/py/batch_test.py | 11 | 19527 | """Tests for google3.cloud.bigscience.apitools.base.py.batch."""
import textwrap
import mock
from six.moves import http_client
from six.moves.urllib import parse
import unittest2
from apitools.base.py import batch
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
class FakeCredentials(object):
def __init__(self):
self.num_refreshes = 0
def refresh(self, _):
self.num_refreshes += 1
class FakeHttp(object):
class FakeRequest(object):
def __init__(self, credentials=None):
if credentials is not None:
self.credentials = credentials
def __init__(self, credentials=None):
self.request = FakeHttp.FakeRequest(credentials=credentials)
class FakeService(object):
"""A service for testing."""
def GetMethodConfig(self, _):
return {}
def GetUploadConfig(self, _):
return {}
# pylint: disable=unused-argument
def PrepareHttpRequest(
self, method_config, request, global_params, upload_config):
return global_params['desired_request']
# pylint: enable=unused-argument
def ProcessHttpResponse(self, _, http_response):
return http_response
class BatchTest(unittest2.TestCase):
def assertUrlEqual(self, expected_url, provided_url):
def parse_components(url):
parsed = parse.urlsplit(url)
query = parse.parse_qs(parsed.query)
return parsed._replace(query=''), query
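        # comparing the query as a parsed dict keeps the assertion
        # insensitive to query-parameter ordering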
expected_parse, expected_query = parse_components(expected_url)
provided_parse, provided_query = parse_components(provided_url)
self.assertEqual(expected_parse, provided_parse)
self.assertEqual(expected_query, provided_query)
def __ConfigureMock(self, mock_request, expected_request, response):
if isinstance(response, list):
response = list(response)
def CheckRequest(_, request, **unused_kwds):
self.assertUrlEqual(expected_request.url, request.url)
self.assertEqual(expected_request.http_method, request.http_method)
if isinstance(response, list):
return response.pop(0)
else:
return response
mock_request.side_effect = CheckRequest
def testRequestServiceUnavailable(self):
mock_service = FakeService()
desired_url = 'https://www.example.com'
batch_api_request = batch.BatchApiRequest(batch_url=desired_url,
retryable_codes=[])
# The request to be added. The actual request sent will be somewhat
# larger, as this is added to a batch.
desired_request = http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80)
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 419,
}, 'x' * 419),
http_wrapper.Response({
'status': '200',
'content-type': 'multipart/mixed; boundary="boundary"',
}, textwrap.dedent("""\
--boundary
content-type: text/plain
content-id: <id+0>
HTTP/1.1 503 SERVICE UNAVAILABLE
nope
--boundary--"""), None))
batch_api_request.Add(
mock_service, 'unused', None,
global_params={'desired_request': desired_request})
api_request_responses = batch_api_request.Execute(
FakeHttp(), sleep_between_polls=0)
self.assertEqual(1, len(api_request_responses))
# Make sure we didn't retry non-retryable code 503.
self.assertEqual(1, mock_request.call_count)
self.assertTrue(api_request_responses[0].is_error)
self.assertIsNone(api_request_responses[0].response)
self.assertIsInstance(api_request_responses[0].exception,
exceptions.HttpError)
def testSingleRequestInBatch(self):
mock_service = FakeService()
desired_url = 'https://www.example.com'
batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
# The request to be added. The actual request sent will be somewhat
# larger, as this is added to a batch.
desired_request = http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80)
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 419,
}, 'x' * 419),
http_wrapper.Response({
'status': '200',
'content-type': 'multipart/mixed; boundary="boundary"',
}, textwrap.dedent("""\
--boundary
content-type: text/plain
content-id: <id+0>
HTTP/1.1 200 OK
content
--boundary--"""), None))
batch_api_request.Add(mock_service, 'unused', None, {
'desired_request': desired_request,
})
api_request_responses = batch_api_request.Execute(FakeHttp())
self.assertEqual(1, len(api_request_responses))
self.assertEqual(1, mock_request.call_count)
self.assertFalse(api_request_responses[0].is_error)
response = api_request_responses[0].response
self.assertEqual({'status': '200'}, response.info)
self.assertEqual('content', response.content)
self.assertEqual(desired_url, response.request_url)
def testRefreshOnAuthFailure(self):
mock_service = FakeService()
desired_url = 'https://www.example.com'
batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
# The request to be added. The actual request sent will be somewhat
# larger, as this is added to a batch.
desired_request = http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80)
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 419,
}, 'x' * 419), [
http_wrapper.Response({
'status': '200',
'content-type': 'multipart/mixed; boundary="boundary"',
}, textwrap.dedent("""\
--boundary
content-type: text/plain
content-id: <id+0>
HTTP/1.1 401 UNAUTHORIZED
Invalid grant
--boundary--"""), None),
http_wrapper.Response({
'status': '200',
'content-type': 'multipart/mixed; boundary="boundary"',
}, textwrap.dedent("""\
--boundary
content-type: text/plain
content-id: <id+0>
HTTP/1.1 200 OK
content
--boundary--"""), None)
])
batch_api_request.Add(mock_service, 'unused', None, {
'desired_request': desired_request,
})
credentials = FakeCredentials()
api_request_responses = batch_api_request.Execute(
FakeHttp(credentials=credentials), sleep_between_polls=0)
self.assertEqual(1, len(api_request_responses))
self.assertEqual(2, mock_request.call_count)
self.assertEqual(1, credentials.num_refreshes)
self.assertFalse(api_request_responses[0].is_error)
response = api_request_responses[0].response
self.assertEqual({'status': '200'}, response.info)
self.assertEqual('content', response.content)
self.assertEqual(desired_url, response.request_url)
def testNoAttempts(self):
desired_url = 'https://www.example.com'
batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
batch_api_request.Add(FakeService(), 'unused', None, {
'desired_request': http_wrapper.Request(desired_url, 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80),
})
api_request_responses = batch_api_request.Execute(None, max_retries=0)
self.assertEqual(1, len(api_request_responses))
self.assertIsNone(api_request_responses[0].response)
self.assertIsNone(api_request_responses[0].exception)
def _DoTestConvertIdToHeader(self, test_id, expected_result):
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertEqual(
expected_result % batch_request._BatchHttpRequest__base_id,
batch_request._ConvertIdToHeader(test_id))
def testConvertIdSimple(self):
self._DoTestConvertIdToHeader('blah', '<%s+blah>')
def testConvertIdThatNeedsEscaping(self):
self._DoTestConvertIdToHeader('~tilde1', '<%s+%%7Etilde1>')
def _DoTestConvertHeaderToId(self, header, expected_id):
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertEqual(expected_id,
batch_request._ConvertHeaderToId(header))
def testConvertHeaderToIdSimple(self):
self._DoTestConvertHeaderToId('<hello+blah>', 'blah')
def testConvertHeaderToIdWithLotsOfPlus(self):
self._DoTestConvertHeaderToId('<a+++++plus>', 'plus')
def _DoTestConvertInvalidHeaderToId(self, invalid_header):
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertRaises(exceptions.BatchError,
batch_request._ConvertHeaderToId, invalid_header)
def testHeaderWithoutAngleBrackets(self):
self._DoTestConvertInvalidHeaderToId('1+1')
def testHeaderWithoutPlus(self):
self._DoTestConvertInvalidHeaderToId('<HEADER>')
def testSerializeRequest(self):
request = http_wrapper.Request(body='Hello World', headers={
'content-type': 'protocol/version',
})
expected_serialized_request = '\n'.join([
'GET HTTP/1.1',
'Content-Type: protocol/version',
'MIME-Version: 1.0',
'content-length: 11',
'Host: ',
'',
'Hello World',
])
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertEqual(expected_serialized_request,
batch_request._SerializeRequest(request))
def testSerializeRequestPreservesHeaders(self):
# Now confirm that if an additional, arbitrary header is added
# that it is successfully serialized to the request. Merely
# check that it is included, because the order of the headers
# in the request is arbitrary.
request = http_wrapper.Request(body='Hello World', headers={
'content-type': 'protocol/version',
'key': 'value',
})
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertTrue(
'key: value\n' in batch_request._SerializeRequest(request))
def testSerializeRequestNoBody(self):
request = http_wrapper.Request(body=None, headers={
'content-type': 'protocol/version',
})
expected_serialized_request = '\n'.join([
'GET HTTP/1.1',
'Content-Type: protocol/version',
'MIME-Version: 1.0',
'Host: ',
'',
'',
])
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertEqual(expected_serialized_request,
batch_request._SerializeRequest(request))
def testDeserializeRequest(self):
serialized_payload = '\n'.join([
'GET HTTP/1.1',
'Content-Type: protocol/version',
'MIME-Version: 1.0',
'content-length: 11',
'key: value',
'Host: ',
'',
'Hello World',
])
example_url = 'https://www.example.com'
expected_response = http_wrapper.Response({
'content-length': str(len('Hello World')),
'Content-Type': 'protocol/version',
'key': 'value',
'MIME-Version': '1.0',
'status': '',
'Host': ''
}, 'Hello World', example_url)
batch_request = batch.BatchHttpRequest(example_url)
self.assertEqual(
expected_response,
batch_request._DeserializeResponse(serialized_payload))
def testNewId(self):
batch_request = batch.BatchHttpRequest('https://www.example.com')
for i in range(100):
self.assertEqual(str(i), batch_request._NewId())
def testAdd(self):
batch_request = batch.BatchHttpRequest('https://www.example.com')
for x in range(100):
batch_request.Add(http_wrapper.Request(body=str(x)))
for key in batch_request._BatchHttpRequest__request_response_handlers:
value = batch_request._BatchHttpRequest__request_response_handlers[
key]
self.assertEqual(key, value.request.body)
self.assertFalse(value.request.url)
self.assertEqual('GET', value.request.http_method)
self.assertIsNone(value.response)
self.assertIsNone(value.handler)
def testInternalExecuteWithFailedRequest(self):
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request('https://www.example.com', 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80),
http_wrapper.Response({'status': '300'}, None, None))
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertRaises(
exceptions.HttpError, batch_request._Execute, None)
def testInternalExecuteWithNonMultipartResponse(self):
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request('https://www.example.com', 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 80,
}, 'x' * 80),
http_wrapper.Response({
'status': '200',
'content-type': 'blah/blah'
}, '', None))
batch_request = batch.BatchHttpRequest('https://www.example.com')
self.assertRaises(
exceptions.BatchError, batch_request._Execute, None)
def testInternalExecute(self):
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as mock_request:
self.__ConfigureMock(
mock_request,
http_wrapper.Request('https://www.example.com', 'POST', {
'content-type': 'multipart/mixed; boundary="None"',
'content-length': 583,
}, 'x' * 583),
http_wrapper.Response({
'status': '200',
'content-type': 'multipart/mixed; boundary="boundary"',
}, textwrap.dedent("""\
--boundary
content-type: text/plain
content-id: <id+2>
HTTP/1.1 200 OK
Second response
--boundary
content-type: text/plain
content-id: <id+1>
HTTP/1.1 401 UNAUTHORIZED
First response
--boundary--"""), None))
test_requests = {
'1': batch.RequestResponseAndHandler(
http_wrapper.Request(body='first'), None, None),
'2': batch.RequestResponseAndHandler(
http_wrapper.Request(body='second'), None, None),
}
batch_request = batch.BatchHttpRequest('https://www.example.com')
batch_request._BatchHttpRequest__request_response_handlers = (
test_requests)
batch_request._Execute(FakeHttp())
test_responses = (
batch_request._BatchHttpRequest__request_response_handlers)
self.assertEqual(http_client.UNAUTHORIZED,
test_responses['1'].response.status_code)
self.assertEqual(http_client.OK,
test_responses['2'].response.status_code)
self.assertIn(
'First response', test_responses['1'].response.content)
self.assertIn(
'Second response', test_responses['2'].response.content)
def testPublicExecute(self):
def LocalCallback(response, exception):
self.assertEqual({'status': '418'}, response.info)
self.assertEqual('Teapot', response.content)
self.assertIsNone(response.request_url)
self.assertIsInstance(exception, exceptions.HttpError)
global_callback = mock.Mock()
batch_request = batch.BatchHttpRequest(
'https://www.example.com', global_callback)
with mock.patch.object(batch.BatchHttpRequest, '_Execute',
autospec=True) as mock_execute:
mock_execute.return_value = None
test_requests = {
'0': batch.RequestResponseAndHandler(
None,
http_wrapper.Response({'status': '200'}, 'Hello!', None),
None),
'1': batch.RequestResponseAndHandler(
None,
http_wrapper.Response({'status': '418'}, 'Teapot', None),
LocalCallback),
}
batch_request._BatchHttpRequest__request_response_handlers = (
test_requests)
batch_request.Execute(None)
# Global callback was called once per handler.
self.assertEqual(len(test_requests), global_callback.call_count)
| bsd-3-clause | 159,701,753,534,936,220 | 37.213307 | 79 | 0.557689 | false |
mensler/ansible | hacking/cherrypick.py | 62 | 1474 | #!/usr/bin/env python3
import os
import sys
import tempfile
import sh
REPO_PATH = {'extras': '/srv/ansible/stable-2.2/lib/ansible/modules/extras',
'core': '/srv/ansible/stable-2.2/lib/ansible/modules/core'}
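# Usage sketch (the commit hash below is illustrative; the script assumes the
# stable-2.2 checkouts configured above exist and that the `sh` package is
# installed):
#
#   ./cherrypick.py 1a2b3c4d extras
#
# It formats the commit from the `devel` branch, rewrites the module paths,
# and applies the result with `git am` inside the chosen modules repository.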
if __name__ == '__main__':
commit_hash = sys.argv[1]
which_modules = sys.argv[2]
git = sh.git.bake('--no-pager', _tty_out=False)
try:
# Get the change
git('checkout', 'devel')
patch = git('format-patch', '-1', '--stdout', commit_hash).stdout
finally:
git('checkout', '-')
# Transform the change for the new repo
patch = patch.replace(b'lib/ansible/modules/', b'')
new_patch = []
patch_stream = (l for l in patch.split(b'\n'))
for line in patch_stream:
if line.strip() == b'---':
new_patch.append(b'(cherry picked from %s)' % commit_hash.encode('utf-8'))
new_patch.append(line)
break
new_patch.append(line)
new_patch.extend(list(patch_stream))
# Save the patch
try:
fh, patchfilename = tempfile.mkstemp()
os.write(fh, b'\n'.join(new_patch))
os.close(fh)
# Apply the patch
try:
orig_dir = os.getcwd()
os.chdir(REPO_PATH[which_modules])
git('am', patchfilename)
finally:
os.chdir(orig_dir)
    except Exception:
print("Problem occurred. Patch saved in: {}".format(patchfilename))
else:
os.remove(patchfilename)
| gpl-3.0 | 3,919,752,161,076,808,000 | 27.346154 | 86 | 0.567843 | false |
erikmcc/kubernetes | cluster/juju/layers/kubernetes-worker/lib/charms/kubernetes/flagmanager.py | 290 | 4961 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from charmhelpers.core import unitdata
class FlagManager:
'''
FlagManager - A Python class for managing the flags to pass to an
application without remembering what's been set previously.
This is a blind class assuming the operator knows what they are doing.
Each instance of this class should be initialized with the intended
application to manage flags. Flags are then appended to a data-structure
and cached in unitdata for later recall.
    The underlying data provider is backed by a SQLite database on each
    unit, tracking the dictionary, provided by the 'charmhelpers' Python
    package.
Summary:
opts = FlagManager('docker')
opts.add('bip', '192.168.22.2')
opts.to_s()
'''
def __init__(self, daemon, opts_path=None):
self.db = unitdata.kv()
self.daemon = daemon
if not self.db.get(daemon):
self.data = {}
else:
self.data = self.db.get(daemon)
def __save(self):
self.db.set(self.daemon, self.data)
def add(self, key, value, strict=False):
'''
Adds data to the map of values for the DockerOpts file.
Supports single values, or "multiopt variables". If you
have a flag only option, like --tlsverify, set the value
        to None. To preserve the exact value, pass strict=True.
        e.g.:
opts.add('label', 'foo')
opts.add('label', 'foo, bar, baz')
opts.add('flagonly', None)
opts.add('cluster-store', 'consul://a:4001,b:4001,c:4001/swarm',
strict=True)
'''
if strict:
self.data['{}-strict'.format(key)] = value
self.__save()
return
if value:
values = [x.strip() for x in value.split(',')]
# handle updates
if key in self.data and self.data[key] is not None:
item_data = self.data[key]
for c in values:
c = c.strip()
if c not in item_data:
item_data.append(c)
self.data[key] = item_data
else:
# handle new
self.data[key] = values
else:
# handle flagonly
self.data[key] = None
self.__save()
def remove(self, key, value):
'''
Remove a flag value from the DockerOpts manager
Assuming the data is currently {'foo': ['bar', 'baz']}
d.remove('foo', 'bar')
> {'foo': ['baz']}
:params key:
:params value:
'''
self.data[key].remove(value)
self.__save()
def destroy(self, key, strict=False):
'''
Destructively remove all values and key from the FlagManager
Assuming the data is currently {'foo': ['bar', 'baz']}
d.wipe('foo')
>{}
:params key:
:params strict:
'''
try:
if strict:
self.data.pop('{}-strict'.format(key))
else:
self.data.pop(key)
self.__save()
except KeyError:
pass
def get(self, key, default=None):
"""Return the value for ``key``, or the default if ``key`` doesn't exist.
"""
return self.data.get(key, default)
def destroy_all(self):
'''
Destructively removes all data from the FlagManager.
'''
self.data.clear()
self.__save()
def to_s(self):
'''
Render the flags to a single string, prepared for the Docker
Defaults file. Typically in /etc/default/docker
d.to_s()
> "--foo=bar --foo=baz"
'''
flags = []
for key in self.data:
if self.data[key] is None:
# handle flagonly
flags.append("{}".format(key))
elif '-strict' in key:
# handle strict values, and do it in 2 steps.
                # If we rstrip('-strict') it also strips a trailing 's'
proper_key = key.rstrip('strict').rstrip('-')
flags.append("{}={}".format(proper_key, self.data[key]))
else:
# handle multiopt and typical flags
for item in self.data[key]:
flags.append("{}={}".format(key, item))
return ' '.join(flags)
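# A minimal usage sketch (assumes a charm hook context where charmhelpers'
# unitdata key/value store is available; key and value are illustrative, and
# keys should include any leading dashes you want emitted):
#
#   opts = FlagManager('docker')
#   opts.add('--insecure-registry', '10.0.0.10:5000')
#   opts.add('--tlsverify', None)   # flag-only option
#   opts.to_s()  # -> roughly '--insecure-registry=10.0.0.10:5000 --tlsverify'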
| apache-2.0 | 3,580,320,430,098,121,000 | 32.295302 | 81 | 0.556138 | false |
JackWoot/E2E-Messenger | Server/server/lib/werkzeug/contrib/wrappers.py | 318 | 10331 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~
Extra wrappers or mixins contributed by the community. These wrappers can
    be mixed into request objects to add extra functionality.
Example::
from werkzeug.wrappers import Request as RequestBase
from werkzeug.contrib.wrappers import JSONRequestMixin
class Request(RequestBase, JSONRequestMixin):
pass
Afterwards this request object provides the extra functionality of the
:class:`JSONRequestMixin`.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import codecs
try:
from simplejson import loads
except ImportError:
from json import loads
from werkzeug.exceptions import BadRequest
from werkzeug.utils import cached_property
from werkzeug.http import dump_options_header, parse_options_header
from werkzeug._compat import wsgi_decoding_dance
def is_known_charset(charset):
"""Checks if the given charset is known to Python."""
try:
codecs.lookup(charset)
except LookupError:
return False
return True
class JSONRequestMixin(object):
"""Add json method to a request object. This will parse the input data
through simplejson if possible.
:exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
is not json or if the data itself cannot be parsed as json.
"""
@cached_property
def json(self):
"""Get the result of simplejson.loads if possible."""
if 'json' not in self.environ.get('CONTENT_TYPE', ''):
raise BadRequest('Not a JSON request')
try:
return loads(self.data)
except Exception:
raise BadRequest('Unable to read JSON request')
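# A minimal mixing-in sketch (the class name is illustrative):
#
#   from werkzeug.wrappers import Request as RequestBase
#
#   class JSONRequest(RequestBase, JSONRequestMixin):
#       pass
#
# Views receiving a JSONRequest can then read `request.json`, which raises
# BadRequest for non-JSON content types or bodies that fail to parse.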
class ProtobufRequestMixin(object):
"""Add protobuf parsing method to a request object. This will parse the
input data through `protobuf`_ if possible.
:exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
    is not protobuf or if the data itself cannot be parsed properly.
.. _protobuf: http://code.google.com/p/protobuf/
"""
#: by default the :class:`ProtobufRequestMixin` will raise a
#: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
#: initialized. You can bypass that check by setting this
#: attribute to `False`.
protobuf_check_initialization = True
def parse_protobuf(self, proto_type):
"""Parse the data into an instance of proto_type."""
if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''):
raise BadRequest('Not a Protobuf request')
obj = proto_type()
try:
obj.ParseFromString(self.data)
except Exception:
raise BadRequest("Unable to parse Protobuf request")
# Fail if not all required fields are set
if self.protobuf_check_initialization and not obj.IsInitialized():
raise BadRequest("Partial Protobuf request")
return obj
class RoutingArgsRequestMixin(object):
"""This request mixin adds support for the wsgiorg routing args
`specification`_.
.. _specification: http://www.wsgi.org/wsgi/Specifications/routing_args
"""
def _get_routing_args(self):
        # the spec value is a (positional, named) pair; default to an empty
        # pair so a missing key yields () instead of raising IndexError
        return self.environ.get('wsgiorg.routing_args', ((), {}))[0]
def _set_routing_args(self, value):
if self.shallow:
raise RuntimeError('A shallow request tried to modify the WSGI '
'environment. If you really want to do that, '
'set `shallow` to False.')
self.environ['wsgiorg.routing_args'] = (value, self.routing_vars)
routing_args = property(_get_routing_args, _set_routing_args, doc='''
The positional URL arguments as `tuple`.''')
del _get_routing_args, _set_routing_args
def _get_routing_vars(self):
rv = self.environ.get('wsgiorg.routing_args')
if rv is not None:
return rv[1]
rv = {}
if not self.shallow:
self.routing_vars = rv
return rv
def _set_routing_vars(self, value):
if self.shallow:
raise RuntimeError('A shallow request tried to modify the WSGI '
'environment. If you really want to do that, '
'set `shallow` to False.')
self.environ['wsgiorg.routing_args'] = (self.routing_args, value)
routing_vars = property(_get_routing_vars, _set_routing_vars, doc='''
The keyword URL arguments as `dict`.''')
del _get_routing_vars, _set_routing_vars
class ReverseSlashBehaviorRequestMixin(object):
"""This mixin reverses the trailing slash behavior of :attr:`script_root`
and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin`
directly on the paths.
    Because it changes the behavior of :class:`Request` this class has to be
mixed in *before* the actual request class::
class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
pass
This example shows the differences (for an application mounted on
`/application` and the request going to `/application/foo/bar`):
+---------------+-------------------+---------------------+
| | normal behavior | reverse behavior |
+===============+===================+=====================+
| `script_root` | ``/application`` | ``/application/`` |
+---------------+-------------------+---------------------+
| `path` | ``/foo/bar`` | ``foo/bar`` |
+---------------+-------------------+---------------------+
"""
@cached_property
def path(self):
"""Requested path as unicode. This works a bit like the regular path
info in the WSGI environment but will not include a leading slash.
"""
path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '',
self.charset, self.encoding_errors)
return path.lstrip('/')
@cached_property
def script_root(self):
"""The root path of the script includling a trailing slash."""
path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '',
self.charset, self.encoding_errors)
return path.rstrip('/') + '/'
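# With the reversed slash behavior, urljoin composes the two parts directly
# (illustrative values; urljoin lives in urllib.parse on Python 3 and in
# urlparse on Python 2):
#
#   from urllib.parse import urljoin
#   urljoin('/application/', 'foo/bar')   # -> '/application/foo/bar'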
class DynamicCharsetRequestMixin(object):
""""If this mixin is mixed into a request class it will provide
a dynamic `charset` attribute. This means that if the charset is
transmitted in the content type headers it's used from there.
    Because it changes the behavior of :class:`Request` this class has
to be mixed in *before* the actual request class::
class MyRequest(DynamicCharsetRequestMixin, Request):
pass
By default the request object assumes that the URL charset is the
same as the data charset. If the charset varies on each request
based on the transmitted data it's not a good idea to let the URLs
change based on that. Most browsers assume either utf-8 or latin1
    for the URLs if they have trouble figuring it out. It's strongly
recommended to set the URL charset to utf-8::
class MyRequest(DynamicCharsetRequestMixin, Request):
url_charset = 'utf-8'
.. versionadded:: 0.6
"""
#: the default charset that is assumed if the content type header
#: is missing or does not contain a charset parameter. The default
#: is latin1 which is what HTTP specifies as default charset.
#: You may however want to set this to utf-8 to better support
#: browsers that do not transmit a charset for incoming data.
default_charset = 'latin1'
def unknown_charset(self, charset):
"""Called if a charset was provided but is not supported by
        the Python codecs module. By default latin1 is assumed, so as
        not to lose any information; you may override this method to
        change the behavior.
:param charset: the charset that was not found.
:return: the replacement charset.
"""
return 'latin1'
@cached_property
def charset(self):
"""The charset from the content type."""
header = self.environ.get('CONTENT_TYPE')
if header:
ct, options = parse_options_header(header)
charset = options.get('charset')
if charset:
if is_known_charset(charset):
return charset
return self.unknown_charset(charset)
return self.default_charset
class DynamicCharsetResponseMixin(object):
"""If this mixin is mixed into a response class it will provide
a dynamic `charset` attribute. This means that if the charset is
looked up and stored in the `Content-Type` header and updates
itself automatically. This also means a small performance hit but
can be useful if you're working with different charsets on
responses.
    Because the charset attribute is not a property at class-level, the
default value is stored in `default_charset`.
    Because it changes the behavior of :class:`Response` this class has
to be mixed in *before* the actual response class::
class MyResponse(DynamicCharsetResponseMixin, Response):
pass
.. versionadded:: 0.6
"""
#: the default charset.
default_charset = 'utf-8'
def _get_charset(self):
header = self.headers.get('content-type')
if header:
charset = parse_options_header(header)[1].get('charset')
if charset:
return charset
return self.default_charset
def _set_charset(self, charset):
header = self.headers.get('content-type')
ct, options = parse_options_header(header)
if not ct:
raise TypeError('Cannot set charset if Content-Type '
'header is missing.')
options['charset'] = charset
self.headers['Content-Type'] = dump_options_header(ct, options)
charset = property(_get_charset, _set_charset, doc="""
The charset for the response. It's stored inside the
Content-Type header as a parameter.""")
del _get_charset, _set_charset
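# A minimal sketch of both dynamic-charset mixins in use (illustrative):
#
#   from werkzeug.wrappers import Request, Response
#
#   class MyRequest(DynamicCharsetRequestMixin, Request):
#       url_charset = 'utf-8'
#
#   class MyResponse(DynamicCharsetResponseMixin, Response):
#       pass
#
#   resp = MyResponse(content_type='text/html; charset=latin1')
#   resp.charset            # 'latin1', read from the Content-Type header
#   resp.charset = 'utf-8'  # rewrites the charset parameter of the header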
| gpl-2.0 | -1,249,642,636,525,795,800 | 36.161871 | 78 | 0.626077 | false |
karthikvadla16/spark-tk | regression-tests/sparktkregtests/testcases/frames/column_method_drop_test.py | 14 | 3803 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests methods that access or alter columns"""
import unittest
from sparktkregtests.lib import sparktk_test
dummy_int_val = -77 # placeholder data value for added column
dummy_col_count = 1000 # length of dummy list for column add
# This function exists to test different sources of row functions,
# i.e. a global function (as opposed to the class-bound one below)
def global_dummy_val_list(row):
return [dummy_int_val for _ in range(0, dummy_col_count)]
class ColumnMethodTest(sparktk_test.SparkTKTestCase):
# Test class bound methods
@staticmethod
def static_dummy_val_list(row):
return [dummy_int_val for _ in range(0, dummy_col_count)]
def setUp(self):
"""Build test_frame"""
super(ColumnMethodTest, self).setUp()
dataset = self.get_file("int_str_float.csv")
schema = [("int", int), ("str", str), ("float", float)]
self.frame = self.context.frame.import_csv(dataset, schema=schema)
def test_column_names(self):
"""all original columns"""
header = self.frame.column_names
self.assertEqual(header, ['int', 'str', 'float'])
def test_column_names_drop(self):
"""Exercise subsets of 1 and 2 columns"""
self.frame.drop_columns('str')
header = self.frame.column_names
self.assertEqual(header, ['int', 'float'])
def test_column_names_drop_multiple(self):
"""Drop multiple columns"""
self.frame.drop_columns(['str', 'float'])
header = self.frame.column_names
self.assertEqual(header, ['int'])
def test_drop_non_existent_column(self):
"""test dropping non-existent column"""
with self.assertRaisesRegexp(
ValueError, 'Invalid column name non-existent provided'):
self.frame.drop_columns("non-existent")
def test_drop_columns(self):
"""Test drop columns scenarios"""
self.frame.add_columns(
lambda row: dummy_int_val, ('product', int))
col_count = len(self.frame.take(1)[0])
self.frame.drop_columns(['int'])
self.assertNotIn('int', self.frame.column_names)
self.assertEqual(col_count-1, len(self.frame.take(1)[0]))
def test_drop_columns_multiple(self):
"""Test drop columns multiple, repeated"""
self.frame.add_columns(
lambda row: dummy_int_val, ('product', int))
col_count = len(self.frame.take(1)[0])
self.frame.drop_columns(['str', 'product', 'str'])
self.assertNotIn('str', self.frame.column_names)
self.assertNotIn('product', self.frame.column_names)
self.assertEqual(col_count-2, len(self.frame.take(1)[0]))
def test_drop_zero_columns(self):
"""Test dropping no columns"""
self.frame.drop_columns([])
header = self.frame.column_names
self.assertEqual(header, ['int', 'str', 'float'])
def test_drop_nonexistent_column(self):
"""Test drop non-existent column"""
with self.assertRaisesRegexp(ValueError, 'Invalid column name'):
self.frame.drop_columns(['no-such-name'])
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 7,348,922,924,445,089,000 | 34.47619 | 75 | 0.645906 | false |
mikeckennedy/cookiecutter-pyramid-talk-python-starter | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/services/cms_service.py | 1 | 1465 | from {{cookiecutter.project_slug}}.data.cms_page import CmsPage
from {{cookiecutter.project_slug}}.data.dbsession import DbSessionFactory
class CmsService:
@classmethod
def get_page_by_url(cls, url):
if not url:
return None
url = url.lower().strip()
session = DbSessionFactory.create_session()
page = session.query(CmsPage) \
.filter(CmsPage.url == url) \
.first()
session.close()
return page
@classmethod
def add_page(cls, url, html, is_redirect=False, redirect_url=None):
if not url or not url.strip():
raise ValueError('url cannot be empty')
url = url.lower().strip()
session = DbSessionFactory.create_session()
page = CmsPage()
page.url = url
page.html = html
page.is_redirect = is_redirect
page.redirect_url = redirect_url
session.add(page)
session.commit()
return page
@classmethod
def init_test_data(cls):
url = '/landing_pages/a_dynamic_cms_page'
if cls.get_page_by_url(url) is not None:
return
cls.add_page(
url,
'<h1>This is a CMS page</h1>\n' +
'\n' +
'<p>\n' +
'You can create them in the DB and any URL can be mapped.<br>\n' +
'See CmsController / CmsService for more info.\n' +
'</p>\n'
)
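# A minimal usage sketch (assumes DbSessionFactory has been initialized at
# application startup; the URL and markup are illustrative):
#
#   CmsService.add_page('/landing_pages/example', '<h1>Example</h1>')
#   page = CmsService.get_page_by_url('/Landing_Pages/Example')
#
# Lookups are effectively case-insensitive because URLs are lower-cased both
# when saved and when queried.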
| mit | -4,483,074,600,582,903,300 | 25.160714 | 78 | 0.545392 | false |
jamesblunt/sympy | sympy/physics/sho.py | 71 | 2482 | from __future__ import print_function, division
from sympy.core import S, pi, Rational
from sympy.functions import assoc_laguerre, sqrt, exp, factorial, factorial2
def R_nl(n, l, nu, r):
"""
Returns the radial wavefunction R_{nl} for a 3d isotropic harmonic
oscillator.
``n``
the "nodal" quantum number. Corresponds to the number of nodes in
the wavefunction. n >= 0
``l``
the quantum number for orbital angular momentum
``nu``
        mass-scaled frequency: nu = m*omega/(2*hbar) where `m` is the mass
and `omega` the frequency of the oscillator.
(in atomic units nu == omega/2)
``r``
Radial coordinate
Examples
========
>>> from sympy.physics.sho import R_nl
>>> from sympy import var
>>> var("r nu l")
(r, nu, l)
>>> R_nl(0, 0, 1, r)
2*2**(3/4)*exp(-r**2)/pi**(1/4)
>>> R_nl(1, 0, 1, r)
4*2**(1/4)*sqrt(3)*(-2*r**2 + 3/2)*exp(-r**2)/(3*pi**(1/4))
l, nu and r may be symbolic:
>>> R_nl(0, 0, nu, r)
2*2**(3/4)*sqrt(nu**(3/2))*exp(-nu*r**2)/pi**(1/4)
>>> R_nl(0, l, 1, r)
r**l*sqrt(2**(l + 3/2)*2**(l + 2)/factorial2(2*l + 1))*exp(-r**2)/pi**(1/4)
The normalization of the radial wavefunction is:
>>> from sympy import Integral, oo
>>> Integral(R_nl(0, 0, 1, r)**2 * r**2, (r, 0, oo)).n()
1.00000000000000
>>> Integral(R_nl(1, 0, 1, r)**2 * r**2, (r, 0, oo)).n()
1.00000000000000
>>> Integral(R_nl(1, 1, 1, r)**2 * r**2, (r, 0, oo)).n()
1.00000000000000
"""
n, l, nu, r = map(S, [n, l, nu, r])
# formula uses n >= 1 (instead of nodal n >= 0)
n = n + 1
C = sqrt(
((2*nu)**(l + Rational(3, 2))*2**(n + l + 1)*factorial(n - 1))/
(sqrt(pi)*(factorial2(2*n + 2*l - 1)))
)
return C*r**(l)*exp(-nu*r**2)*assoc_laguerre(n - 1, l + S(1)/2, 2*nu*r**2)
def E_nl(n, l, hw):
"""
Returns the Energy of an isotropic harmonic oscillator
``n``
the "nodal" quantum number
``l``
the orbital angular momentum
``hw``
the harmonic oscillator parameter.
The unit of the returned value matches the unit of hw, since the energy is
calculated as:
E_nl = (2*n + l + 3/2)*hw
Examples
========
>>> from sympy.physics.sho import E_nl
>>> from sympy import symbols
>>> x, y, z = symbols('x, y, z')
>>> E_nl(x, y, z)
z*(2*x + y + 3/2)
"""
return (2*n + l + Rational(3, 2))*hw
| bsd-3-clause | -6,422,820,457,292,936,000 | 26.577778 | 79 | 0.520145 | false |
iceihehe/pipeg | python3/genome2.py | 4 | 5090 | #!/usr/bin/env python3
# Copyright © 2012-13 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version. It is provided for
# educational purposes and is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import collections
import json
import os
import re
import subprocess
import sys
UTF8 = "utf-8"
TRANSFORM, SUMMARIZE = ("TRANSFORM", "SUMMARIZE")
Code = collections.namedtuple("Code", "name code kind")
def main():
genome = 3 * GENOME
for i, code in enumerate(CODE):
context = dict(genome=genome, target="G[AC]{2}TT", replace="TCGA")
execute(code, context)
if sys.version_info[:2] > (3, 1):
def execute(code, context):
module, offset = create_module(code.code, context)
with subprocess.Popen([sys.executable, "-"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
communicate(process, code, module, offset)
else:
def execute(code, context):
module, offset = create_module(code.code, context)
process = subprocess.Popen([sys.executable, "-"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
communicate(process, code, module, offset)
def create_module(code, context):
lines = ["import json", "result = error = None"]
for key, value in context.items():
lines.append("{} = {!r}".format(key, value))
offset = len(lines) + 1
outputLine = "\nprint(json.dumps((result, error)))"
return "\n".join(lines) + "\n" + code + outputLine, offset
def communicate(process, code, module, offset):
stdout, stderr = process.communicate(module.encode(UTF8))
if stderr:
stderr = stderr.decode(UTF8).lstrip().replace(", in <module>", ":")
stderr = re.sub(", line (\d+)",
lambda match: str(int(match.group(1)) - offset), stderr)
print(re.sub(r'File."[^"]+?"', "'{}' has an error on line "
.format(code.name), stderr))
return
if stdout:
result, error = json.loads(stdout.decode(UTF8))
handle_result(code, result, error)
return
print("'{}' produced no result\n".format(code.name))
def handle_result(code, result, error):
if error is not None:
print("'{}' error: {}".format(code.name, error))
elif result is None:
print("'{}' produced no result".format(code.name))
elif code.kind == TRANSFORM:
genome = result
try:
print("'{}' produced a genome of length {}".format(code.name,
len(genome)))
except TypeError as err:
print("'{}' error: expected a sequence result: {}".format(
code.name, err))
elif code.kind == SUMMARIZE:
print("'{}' produced a result of {}".format(code.name, result))
print()
CODE = (
Code("Count",
"""
import re
matches = re.findall(target, genome)
if matches:
result = len(matches)
else:
error = "'{}' not found".format(target)
""", SUMMARIZE)
,
Code("Replace",
"""
import re
result, count = re.subn(target, replace, genome)
if not count:
error = "no '{}' replacements made".format(target)
""", TRANSFORM)
,
Code("Exception Test",
"""
result = 0
for i in range(len(genome)):
if genome[i] = "A":
result += 1
""", SUMMARIZE)
,
Code("Error Test",
"""
import re
matches = re.findall(target * 5, genome)
if matches:
result = len(matches)
else:
error = "'{}' not found".format(target)
""", TRANSFORM)
,
Code("No Result Test",
"""
# No result
""", TRANSFORM)
,
Code("Wrong Kind Test",
"""
result = len(genome)
""", TRANSFORM)
,
Code("Termination Test",
"""
import sys
result = "terminating"
sys.exit()
""", SUMMARIZE)
,
Code("Length",
"""
result = len(genome)
""", SUMMARIZE)
)
GENOME = """TGTTAGTCGCTCCTCGGTCTAAGACATCAAAGTCGGTCTGCGCGGCTGCTCCCTTAGCGCTG
CATAAGAGCGGGGCAGAGAGAGATAGGCGTTTTGACCGTGGCGAGCAAGGCGCGTCATAGTGTCGCCGTGACTG
ATCCTACTGGGTTCTTGCTACTGCCCGGGTCGCAATCCAAAATCTCCACGCGCTGCCACCCCGAAGAAGATATA
TGTCACTGAATTGTATTGGTAACATAGTCGAATTGGGTTCAGGTAAGTTAGTCGTTTAGCCGCTGCGACAGTGG
TGGAAGGGCGAATAGTGTAAAATTTCGCCTGTTAGTGAACATTATCAGGCTGCCATCGTTGATCGCCCCTCTTA
AACTCAGTCTTAAATGAGTTCCCGCCTAAGGTCATTCGTGCCTTGATGATTGATAGCTCGATTGGTCCCTTATG
AAACCGGACCAGAAATGTACCCGCTGAACCGGTGTCATAAGTGTCGCCGTCCCTACGATCGACACTTCCTGAGC
ACGAACGATTTGCGACGCTGTAATGCCACGAGGACTGCATTGAAGATTTTTTGTCCTAGGTGTATGTGCTTCTC
AGGAAGATGCACTACGCACTCCCCTTATCACGGGTGTGACCATCAGGTAGCGTAGGAAGATTAAGACCGCGTAA
CTATCCCTTTCCGTCGCACTCCGACGTCTCAGCACATGTGCGGGGGCCCCTAATTGAGAAACAGTCCATGGTTG
TCCGTAAGTTTCGGAAATCAACTTCACTGCTAGATGGTTGGACGCCAAGGCTCAATAGGTTGGACTCTAAGAAG
""".replace("\n", "")
if __name__ == "__main__":
main()
| mit | 1,511,552,572,860,280,300 | 29.656627 | 75 | 0.677343 | false |
sjdines/mezzanine-fluent-pages | mezzanine_fluent_pages/mezzanine_layout_page/admin.py | 1 | 6680 | from django.conf.urls import url
from django.contrib import admin
from fluent_contents.admin import PlaceholderEditorAdmin
from fluent_contents.analyzer import get_template_placeholder_data
from fluent_utils.ajax import JsonResponse
from mezzanine.pages.admin import PageAdmin
from . import models, widgets
class FluentContentsLayoutPageAdmin(PlaceholderEditorAdmin, PageAdmin):
"""
Admin configuration for `FluentContentsLayoutPage`.
"""
# The `change_form_template` is overwritten to include the content type id in the JS which is
# used in the fluent ajax calls.
change_form_template = 'admin/fluent_mezzanine/change_form.html'
class Media:
# This is a custom JS adaption of the `fluent_layouts.js` found in
# `fluent_pages.fluentpage`. The only modification is to change the `app_root` variable
# declaration to a new endpoint. The rest of the code has been used here so `fluent_pages`
# is not a requirement to use this package.
js = ('fluent_mezzanine/fluent_layouts.js',)
def get_placeholder_data(self, request, obj=None):
"""
Provides a list of `fluent_contents.models.PlaceholderData`
classes, that describe the contents of the template.
:param request: Django request object.
:param obj: Object to get place holder data from.
:return: list of `~fluent_contents.models.PlaceholderData`
"""
template = self.get_page_template(obj)
if not template:
return [] # No template means no data!
else:
return get_template_placeholder_data(template)
def get_page_template(self, page):
"""
Return the template that is associated with the page.
If no page is provided then the first available template will
be used as defined in `PageLayout`. If not `PageLayout` exists
then `None` will be returned.
:param page: Page object to obtain the template from.
:return: Template object or None.
"""
if page is None:
# Add page. start with default template.
try:
return models.PageLayout.objects.all()[0].get_template()
except IndexError:
return None
else:
# Change page, honor template of object.
return page.layout.get_template()
# ---- Layout selector code ----
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Overwrite the widget for the `layout` foreign key.
:param db_field: Field on the object.
:param request: Django request object.
:param kwargs: Extra keyword arguments.
:return: Formfield.
"""
if db_field.name == 'layout':
kwargs['widget'] = widgets.LayoutSelector
return super(FluentContentsLayoutPageAdmin, self).formfield_for_foreignkey(
db_field,
request,
**kwargs
)
def get_urls(self):
"""
Add URL pattern for obtaining layout information.
:return: List of URL patterns.
"""
urls = super(FluentContentsLayoutPageAdmin, self).get_urls()
my_urls = [
url(
r'^get_layout/(?P<id>\d+)/$',
self.admin_site.admin_view(self.get_layout_view),
name='get_layout',
)
]
return my_urls + urls
def get_layout_view(self, request, id):
"""
Return the metadata about a layout
:param request: Django request object.
:param id: Id integer value (pk) for the layout referenced.
:return: JsonResponse with layout information or error message.
"""
# Get the layout or if it does not exist return an error message.
try:
layout = models.PageLayout.objects.get(pk=id)
except models.PageLayout.DoesNotExist:
json = {'success': False, 'error': 'Layout not found'}
status = 404
else:
template = layout.get_template()
placeholders = get_template_placeholder_data(template)
status = 200
# Set useful information regarding the layout.
json = {
'id': layout.id,
'key': layout.key,
'title': layout.title,
'placeholders': [p.as_dict() for p in placeholders],
}
return JsonResponse(json, status=status)
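    # For reference, a successful response body looks roughly like this
    # (field values illustrative; the placeholder dicts come from
    # PlaceholderData.as_dict()):
    #
    #   {"id": 1, "key": "default", "title": "Default",
    #    "placeholders": [{"slot": "main", ...}]}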
# ---- Layout permission hooks ----
def get_readonly_fields(self, request, obj=None):
"""
Allow layout modification on initial creation only if no perms.
If the user does not have the privilege to access the layout
field initially we need to overwrite that as it is a required
field.
After it is set we can return to the default behaviour.
:param request: Django request object.
:param obj: Object instance that uses layout fields.
:return: List of read only fields.
"""
fields = super(FluentContentsLayoutPageAdmin, self).get_readonly_fields(request, obj)
if (
obj is not None and
'layout' not in fields and
not self.has_change_page_layout_permission(request, obj)
):
# Disable on edit page only.
# Add page is allowed, need to be able to choose initial layout
fields = fields + ('layout',)
return fields
def has_change_page_layout_permission(self, request, obj):
"""
Whether the user can change the page layout.
:param request: Django request object.
:param obj: Object instance that uses layout fields.
:return: Boolean (True if user has permission to change
the layout; False if the user does not have permission to
change the layout).
"""
codename = '{0}.change_page_layout'.format(obj._meta.app_label)
return request.user.has_perm(codename, obj=obj)
class PageLayoutAdmin(admin.ModelAdmin):
"""
Admin configuration for `PageLayout` model.
"""
# Config list page:
list_display = ['title', 'key', ]
fieldsets = (
(
None, {
'fields': (
'title',
'key',
'template_path'
),
}
),
)
prepopulated_fields = {
'key': (
'title',
)
}
# Admin registration.
admin.site.register(models.FluentContentsLayoutPage, FluentContentsLayoutPageAdmin)
admin.site.register(models.PageLayout, PageLayoutAdmin)
| bsd-2-clause | 5,857,697,848,118,716,000 | 33.43299 | 98 | 0.600599 | false |
ibmsoe/tensorflow | tensorflow/python/framework/meta_graph.py | 34 | 26049 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MetaGraph and related functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os.path
import re
import six
from google.protobuf.any_pb2 import Any
from google.protobuf import text_format
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import importer
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import versions
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
# Prefix to be added to unbound input names so they are easily identifiable.
_UNBOUND_INPUT_PREFIX = "$unbound_inputs_"
def _node_def(from_node_def, export_scope, unbound_inputs, clear_devices=False):
"""Create a `NodeDef` proto with export_scope stripped.
Args:
from_node_def: A `node_def_pb2.NodeDef` protocol buffer.
export_scope: A `string` representing the name scope to remove.
unbound_inputs: An array of unbound input names if they exist.
clear_devices: Boolean which controls whether to clear device information
from node_def. Default false.
Returns:
A `node_def_pb2.NodeDef` protocol buffer.
"""
node_def = copy.deepcopy(from_node_def)
for i, v in enumerate(node_def.input):
if (export_scope and
not node_def.input[i].lstrip("^").startswith(export_scope)):
# Adds "$unbound_inputs_" prefix to the unbound name so they are easily
# identifiable.
node_def.input[i] = re.sub(r"([\^]|^)(.*)",
r"\1" + _UNBOUND_INPUT_PREFIX + r"\2",
compat.as_str(v))
unbound_inputs.append(node_def.input[i])
else:
node_def.input[i] = ops.strip_name_scope(v, export_scope)
node_def.name = compat.as_bytes(
ops.strip_name_scope(from_node_def.name, export_scope))
for k, v in six.iteritems(from_node_def.attr):
if k == "_class":
new_s = [compat.as_bytes(
ops.strip_name_scope(s, export_scope)) for s in v.list.s
if not export_scope or
compat.as_str(s).split("@")[1].startswith(export_scope)]
node_def.attr[k].CopyFrom(attr_value_pb2.AttrValue(
list=attr_value_pb2.AttrValue.ListValue(s=new_s)))
else:
node_def.attr[k].CopyFrom(v)
if clear_devices:
node_def.device = ""
return node_def
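# For example (names illustrative), with export_scope='scope_a' an input
# 'other/op' that lives outside the scope becomes
# '$unbound_inputs_other/op' (and '^other/op' becomes
# '^$unbound_inputs_other/op'), while 'scope_a/op' is stripped to 'op'.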
def _read_file(filename):
"""Reads a file containing `GraphDef` and returns the protocol buffer.
Args:
filename: `graph_def` filename including the path.
Returns:
A `GraphDef` protocol buffer.
Raises:
IOError: If the file doesn't exist, or cannot be successfully parsed.
"""
graph_def = graph_pb2.GraphDef()
if not file_io.file_exists(filename):
raise IOError("File %s does not exist." % filename)
# First try to read it as a binary file.
file_content = file_io.FileIO(filename, "rb").read()
try:
graph_def.ParseFromString(file_content)
return graph_def
except Exception: # pylint: disable=broad-except
pass
# Next try to read it as a text file.
try:
text_format.Merge(file_content, graph_def)
except text_format.ParseError as e:
raise IOError("Cannot parse file %s: %s." % (filename, str(e)))
return graph_def
def ops_used_by_graph_def(graph_def):
"""Collect the list of ops used by a graph.
Does not validate that the ops are all registered.
Args:
graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`.
Returns:
A list of strings, each naming an op used by the graph.
"""
# Map function names to definitions
name_to_function = {}
for fun in graph_def.library.function:
name_to_function[fun.signature.name] = fun
# Collect the list of op names. Since functions can reference functions, we
# need a recursive traversal.
used_ops = set() # Includes both primitive ops and functions
functions_to_process = [] # A subset of used_ops
def mark_op_as_used(op):
if op not in used_ops and op in name_to_function:
functions_to_process.append(name_to_function[op])
used_ops.add(op)
for node in graph_def.node:
mark_op_as_used(node.op)
while functions_to_process:
fun = functions_to_process.pop()
for node in fun.node_def:
mark_op_as_used(node.op)
return [op for op in used_ops if op not in name_to_function]
def stripped_op_list_for_graph(graph_def):
"""Collect the stripped OpDefs for ops used by a graph.
This function computes the `stripped_op_list` field of `MetaGraphDef` and
similar protos. The result can be communicated from the producer to the
consumer, which can then use the C++ function
`RemoveNewDefaultAttrsFromGraphDef` to improve forwards compatibility.
Args:
graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`.
Returns:
An `OpList` of ops used by the graph.
Raises:
ValueError: If an unregistered op is used.
"""
# This is the Python equivalent of StrippedOpListForGraph in C++.
# Unfortunately, since the Python op registry can differ from that in C++, we
# can't remove the duplication using swig (at least naively).
# TODO(irving): Support taking graphs directly.
used_ops = ops_used_by_graph_def(graph_def)
# Verify that all used ops are registered.
registered_ops = op_def_registry.get_registered_ops()
# These internal ops used by functions are not registered, so we need to
# whitelist them. # TODO(irving): Do something better here.
op_whitelist = ("_Arg", "_Retval", "_ListToArray", "_ArrayToList")
for op in used_ops:
if op not in registered_ops and op not in op_whitelist:
raise ValueError("Op %s is used by the graph, but is not registered" % op)
# Build the stripped op list in sorted order
return op_def_pb2.OpList(op=[registered_ops[op] for op in sorted(used_ops)
if op in registered_ops])
def _get_kind_name(item):
"""Returns the kind name in CollectionDef.
Args:
item: A data item.
Returns:
The string representation of the kind in CollectionDef.
"""
if isinstance(item, (six.string_types, six.binary_type)):
kind = "bytes_list"
elif isinstance(item, six.integer_types):
kind = "int64_list"
elif isinstance(item, float):
kind = "float_list"
elif isinstance(item, Any):
kind = "any_list"
else:
kind = "node_list"
return kind
def _should_include_node(node_or_node_name, export_scope):
"""Returns `True` if a node should be included.
Args:
node_or_node_name: A node or `string` node name.
export_scope: `string`. Name scope under which to extract the subgraph. The
      scope name will be stripped from the node definitions for easy import later
into new name scopes.
Returns:
`True` if the node should be included.
"""
if not isinstance(node_or_node_name, six.string_types):
try:
node_name = node_or_node_name.name
except AttributeError:
# Keep the object that we don't know how to process.
return True
else:
node_name = node_or_node_name
return (node_name.startswith(_UNBOUND_INPUT_PREFIX) or
(not export_scope or node_name.startswith(export_scope)))
def add_collection_def(meta_graph_def, key, graph=None,
export_scope=None):
"""Adds a collection to MetaGraphDef protocol buffer.
Args:
meta_graph_def: MetaGraphDef protocol buffer.
key: One of the GraphKeys or user-defined string.
graph: The `Graph` from which to get collections.
export_scope: Optional `string`. Name scope to remove.
"""
if graph and not isinstance(graph, ops.Graph):
raise TypeError("graph must be of type Graph, not %s", type(graph))
if not isinstance(key, six.string_types) and not isinstance(key, bytes):
logging.warning("Only collections with string type keys will be "
"serialized. This key has %s", type(key))
return
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
collection_list = graph.get_collection(key)
# Remove nodes that should not be exported from the collection list.
collection_list = [x for x in collection_list if
_should_include_node(x, export_scope)]
if not collection_list:
return
try:
col_def = meta_graph_def.collection_def[key]
to_proto = ops.get_to_proto_function(key)
proto_type = ops.get_collection_proto_type(key)
if to_proto:
kind = "bytes_list"
for x in collection_list:
# Additional type check to make sure the returned proto is indeed
# what we expect.
proto = to_proto(x, export_scope=export_scope)
if proto:
assert isinstance(proto, proto_type)
getattr(col_def, kind).value.append(proto.SerializeToString())
else:
kind = _get_kind_name(collection_list[0])
if kind == "node_list":
for x in collection_list:
if not export_scope or x.name.startswith(export_scope):
getattr(col_def, kind).value.append(
ops.strip_name_scope(x.name, export_scope))
elif kind == "bytes_list":
# NOTE(opensource): This force conversion is to work around the fact
# that Python3 distinguishes between bytes and strings.
getattr(col_def, kind).value.extend(
[compat.as_bytes(x) for x in collection_list])
else:
getattr(col_def, kind).value.extend([x for x in collection_list])
except Exception as e: # pylint: disable=broad-except
logging.warning("Error encountered when serializing %s.\n"
"Type is unsupported, or the types of the items don't "
"match field type in CollectionDef.\n%s", key, str(e))
if key in meta_graph_def.collection_def:
del meta_graph_def.collection_def[key]
return
def create_meta_graph_def(meta_info_def=None,
graph_def=None,
saver_def=None,
collection_list=None,
graph=None,
export_scope=None):
"""Construct and returns a `MetaGraphDef` protocol buffer.
Args:
meta_info_def: `MetaInfoDef` protocol buffer.
graph_def: `GraphDef` protocol buffer.
saver_def: `SaverDef` protocol buffer.
collection_list: List of string keys to collect.
graph: The `Graph` to create `MetaGraphDef` out of.
export_scope: Optional `string`. Name scope to remove.
Returns:
MetaGraphDef protocol buffer.
Raises:
TypeError: If the arguments are not of the correct proto buffer type.
"""
# Type check.
if graph and not isinstance(graph, ops.Graph):
raise TypeError("graph must be of type Graph, not %s", type(graph))
if meta_info_def and not isinstance(meta_info_def,
meta_graph_pb2.MetaGraphDef.MetaInfoDef):
raise TypeError("meta_info_def must be of type MetaInfoDef, not %s",
type(meta_info_def))
if graph_def and not isinstance(graph_def, graph_pb2.GraphDef):
raise TypeError("graph_def must be of type GraphDef, not %s",
type(graph_def))
if saver_def and not isinstance(saver_def, saver_pb2.SaverDef):
raise TypeError("saver_def must be of type SaverDef, not %s",
type(saver_def))
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
# Creates a MetaGraphDef proto.
meta_graph_def = meta_graph_pb2.MetaGraphDef()
# Adds meta_info_def.
if not meta_info_def:
meta_info_def = meta_graph_pb2.MetaGraphDef.MetaInfoDef()
# Set the tf version strings to the current tf build.
meta_info_def.tensorflow_version = versions.__version__
meta_info_def.tensorflow_git_version = versions.__git_version__
meta_graph_def.meta_info_def.MergeFrom(meta_info_def)
# Adds graph_def or the default.
if not graph_def:
meta_graph_def.graph_def.MergeFrom(graph.as_graph_def(add_shapes=True))
else:
meta_graph_def.graph_def.MergeFrom(graph_def)
# Fills in meta_info_def.stripped_op_list using the ops from graph_def.
# pylint: disable=g-explicit-length-test
if len(meta_graph_def.meta_info_def.stripped_op_list.op) == 0:
meta_graph_def.meta_info_def.stripped_op_list.MergeFrom(
stripped_op_list_for_graph(meta_graph_def.graph_def))
# pylint: enable=g-explicit-length-test
# Adds saver_def.
if saver_def:
meta_graph_def.saver_def.MergeFrom(saver_def)
# Adds collection_list.
if collection_list:
clist = collection_list
else:
clist = graph.get_all_collection_keys()
for ctype in clist:
add_collection_def(meta_graph_def, ctype,
graph=graph,
export_scope=export_scope)
return meta_graph_def
def read_meta_graph_file(filename):
"""Reads a file containing `MetaGraphDef` and returns the protocol buffer.
Args:
filename: `meta_graph_def` filename including the path.
Returns:
A `MetaGraphDef` protocol buffer.
Raises:
IOError: If the file doesn't exist, or cannot be successfully parsed.
"""
meta_graph_def = meta_graph_pb2.MetaGraphDef()
if not file_io.file_exists(filename):
raise IOError("File %s does not exist." % filename)
# First try to read it as a binary file.
file_content = file_io.FileIO(filename, "rb").read()
try:
meta_graph_def.ParseFromString(file_content)
return meta_graph_def
except Exception: # pylint: disable=broad-except
pass
# Next try to read it as a text file.
try:
text_format.Merge(file_content.decode("utf-8"), meta_graph_def)
except text_format.ParseError as e:
raise IOError("Cannot parse file %s: %s." % (filename, str(e)))
return meta_graph_def
def import_scoped_meta_graph(meta_graph_or_file,
clear_devices=False,
graph=None,
import_scope=None,
input_map=None,
unbound_inputs_col_name="unbound_inputs"):
"""Recreates a`Graph` saved in a `MetaGraphDef` proto.
This function takes a `MetaGraphDef` protocol buffer as input. If
the argument is a file containing a `MetaGraphDef` protocol buffer ,
it constructs a protocol buffer from the file content. The function
then adds all the nodes from the `graph_def` field to the
current graph, recreates all the collections, and returns a saver
constructed from the `saver_def` field.
In combination with `export_scoped_meta_graph()`, this function can be used to
* Serialize a graph along with other Python objects such as `QueueRunner`,
`Variable` into a `MetaGraphDef`.
* Restart training from a saved graph and checkpoints.
* Run inference from a saved graph and checkpoints.
Args:
meta_graph_or_file: `MetaGraphDef` protocol buffer or filename (including
the path) containing a `MetaGraphDef`.
clear_devices: Boolean which controls whether to clear device information
from graph_def. Default false.
graph: The `Graph` to import into. If `None`, use the default graph.
import_scope: Optional `string`. Name scope into which to import the
subgraph. If `None`, the graph is imported to the root name scope.
input_map: A dictionary mapping input names (as strings) in `graph_def` to
`Tensor` objects. The values of the named input tensors in the imported
graph will be re-mapped to the respective `Tensor` values.
unbound_inputs_col_name: Collection name for looking up unbound inputs.
Returns:
A dictionary of all the `Variables` imported into the name scope.
Raises:
ValueError: If the graph_def contains unbound inputs.
"""
if isinstance(meta_graph_or_file, meta_graph_pb2.MetaGraphDef):
meta_graph_def = meta_graph_or_file
else:
meta_graph_def = read_meta_graph_file(meta_graph_or_file)
if unbound_inputs_col_name:
for key, col_def in meta_graph_def.collection_def.items():
if key == unbound_inputs_col_name:
kind = col_def.WhichOneof("kind")
field = getattr(col_def, kind)
if field.value and (
not input_map or
sorted([compat.as_str(v) for v in field.value]) !=
sorted(input_map)):
raise ValueError("Graph contains unbound inputs: %s. Must "
"provide these inputs through input_map." %
",".join([compat.as_str(v) for v in field.value
if not input_map or v not in input_map]))
break
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
# Gathers the list of nodes we are interested in.
with graph.as_default():
producer_op_list = None
if meta_graph_def.meta_info_def.HasField("stripped_op_list"):
producer_op_list = meta_graph_def.meta_info_def.stripped_op_list
input_graph_def = meta_graph_def.graph_def
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:
for node in input_graph_def.node:
node.device = ""
importer.import_graph_def(
input_graph_def, name=(import_scope or ""), input_map=input_map,
producer_op_list=producer_op_list)
# Restores all the other collections.
for key, col_def in meta_graph_def.collection_def.items():
# Don't add unbound_inputs to the new graph.
if key == unbound_inputs_col_name:
continue
kind = col_def.WhichOneof("kind")
if kind is None:
logging.error("Cannot identify data type for collection %s. Skipping.",
key)
continue
from_proto = ops.get_from_proto_function(key)
if from_proto:
assert kind == "bytes_list"
proto_type = ops.get_collection_proto_type(key)
for value in col_def.bytes_list.value:
proto = proto_type()
proto.ParseFromString(value)
graph.add_to_collection(
key, from_proto(proto, import_scope=import_scope))
else:
field = getattr(col_def, kind)
if kind == "node_list":
for value in field.value:
col_op = graph.as_graph_element(
ops.prepend_name_scope(value, import_scope))
graph.add_to_collection(key, col_op)
elif kind == "int64_list":
# NOTE(opensource): This force conversion is to work around the fact
# that Python2 distinguishes between int and long, while Python3 has
# only int.
for value in field.value:
graph.add_to_collection(key, int(value))
else:
for value in field.value:
graph.add_to_collection(
key, ops.prepend_name_scope(value, import_scope))
var_list = {}
variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
scope=import_scope)
for v in variables:
var_list[ops.strip_name_scope(v.name, import_scope)] = v
return var_list
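# A minimal export/import round-trip sketch (paths and scope names are
# illustrative):
#
#   meta_graph_def, var_list = export_scoped_meta_graph(
#       filename='/tmp/model.meta', export_scope='model')
#   ...
#   with ops.Graph().as_default():
#     copied_vars = import_scoped_meta_graph(
#         '/tmp/model.meta', import_scope='new_model')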
def export_scoped_meta_graph(filename=None,
graph_def=None,
graph=None,
export_scope=None,
as_text=False,
unbound_inputs_col_name="unbound_inputs",
clear_devices=False,
**kwargs):
"""Returns `MetaGraphDef` proto. Optionally writes it to filename.
This function exports the graph, saver, and collection objects into
`MetaGraphDef` protocol buffer with the intention of it being imported
at a later time or location to restart training, run inference, or be
a subgraph.
Args:
filename: Optional filename including the path for writing the
generated `MetaGraphDef` protocol buffer.
graph_def: `GraphDef` protocol buffer.
graph: The `Graph` to import into. If `None`, use the default graph.
export_scope: Optional `string`. Name scope under which to extract
      the subgraph. The scope name will be stripped from the node definitions
for easy import later into new name scopes. If `None`, the whole graph
is exported. graph_def and export_scope cannot both be specified.
as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto.
unbound_inputs_col_name: Optional `string`. If provided, a string collection
with the given name will be added to the returned `MetaGraphDef`,
containing the names of tensors that must be remapped when importing the
`MetaGraphDef`.
clear_devices: Boolean which controls whether to clear device information
before exporting the graph.
**kwargs: Optional keyed arguments, including meta_info_def,
saver_def, collection_list.
Returns:
A `MetaGraphDef` proto and dictionary of `Variables` in the exported
name scope.
Raises:
ValueError: When the `GraphDef` is larger than 2GB.
"""
graph = graph or ops.get_default_graph()
unbound_inputs = []
if export_scope or clear_devices:
if graph_def:
new_graph_def = graph_pb2.GraphDef()
new_graph_def.versions.CopyFrom(graph_def.versions)
for node_def in graph_def.node:
if _should_include_node(node_def.name, export_scope):
new_node_def = _node_def(node_def, export_scope, unbound_inputs,
clear_devices=clear_devices)
new_graph_def.node.extend([new_node_def])
graph_def = new_graph_def
else:
# Only do this complicated work if we want to remove a name scope.
graph_def = graph_pb2.GraphDef()
# pylint: disable=protected-access
graph_def.versions.CopyFrom(graph.graph_def_versions)
bytesize = 0
for key in sorted(graph._nodes_by_id):
if _should_include_node(graph._nodes_by_id[key].name, export_scope):
value = graph._nodes_by_id[key]
# pylint: enable=protected-access
node_def = _node_def(value.node_def, export_scope, unbound_inputs,
clear_devices=clear_devices)
graph_def.node.extend([node_def])
if value.outputs:
assert "_output_shapes" not in graph_def.node[-1].attr
graph_def.node[-1].attr["_output_shapes"].list.shape.extend([
output.get_shape().as_proto() for output in value.outputs])
bytesize += value.node_def.ByteSize()
if bytesize >= (1 << 31) or bytesize < 0:
raise ValueError("GraphDef cannot be larger than 2GB.")
# It's possible that not all the inputs are in the export_scope.
# If we would like such information included in the exported meta_graph,
# add them to a special unbound_inputs collection.
if unbound_inputs_col_name:
# Clears the unbound_inputs collections.
graph.clear_collection(unbound_inputs_col_name)
for k in unbound_inputs:
graph.add_to_collection(unbound_inputs_col_name, k)
var_list = {}
variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
scope=export_scope)
for v in variables:
if _should_include_node(v, export_scope):
var_list[ops.strip_name_scope(v.name, export_scope)] = v
scoped_meta_graph_def = create_meta_graph_def(
graph_def=graph_def,
graph=graph,
export_scope=export_scope,
**kwargs)
if filename:
graph_io.write_graph(
scoped_meta_graph_def,
os.path.dirname(filename),
os.path.basename(filename),
as_text=as_text)
return scoped_meta_graph_def, var_list
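# Usage sketch (illustrative, not part of the original module): exporting only
# the ops under an assumed scope "tower_0" while clearing device placements;
# tensors feeding the scope from outside end up in the "unbound_inputs"
# collection and must be re-mapped through input_map on import.
#
#   meta_graph_def, var_list = export_scoped_meta_graph(
#       filename="tower_0.meta", export_scope="tower_0", clear_devices=True)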
def copy_scoped_meta_graph(from_scope, to_scope,
from_graph=None, to_graph=None):
"""Copies a sub-meta_graph from one scope to another.
Args:
from_scope: `String` name scope containing the subgraph to be copied.
to_scope: `String` name scope under which the copied subgraph will reside.
from_graph: Optional `Graph` from which to copy the subgraph. If `None`, the
      default graph is used.
to_graph: Optional `Graph` to which to copy the subgraph. If `None`, the
default graph is used.
Returns:
A dictionary of `Variables` that has been copied into `to_scope`.
Raises:
ValueError: If `from_scope` and `to_scope` are the same while
`from_graph` and `to_graph` are also the same.
"""
from_graph = from_graph or ops.get_default_graph()
to_graph = to_graph or ops.get_default_graph()
if from_graph == to_graph and from_scope == to_scope:
raise ValueError("'from_scope' and 'to_scope' need to be different "
"when performing copy in the same graph.")
orig_meta_graph, var_list = export_scoped_meta_graph(
export_scope=from_scope, graph=from_graph)
var_list = import_scoped_meta_graph(orig_meta_graph,
graph=to_graph,
import_scope=to_scope)
return var_list
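# Usage sketch (illustrative): duplicating an assumed scope "tower_0" as
# "tower_1" inside the default graph.
#
#   copied_vars = copy_scoped_meta_graph(from_scope="tower_0",
#                                        to_scope="tower_1")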
| apache-2.0 | 8,348,980,136,943,049,000 | 37.083333 | 80 | 0.657568 | false |
fuzzysteve/yamlloader | tableloader/tableFunctions/groups.py | 1 | 1801 | # -*- coding: utf-8 -*-
from yaml import load, dump
try:
from yaml import CSafeLoader as SafeLoader
print "Using CSafeLoader"
except ImportError:
from yaml import SafeLoader
print "Using Python SafeLoader"
import os
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
from sqlalchemy import Table
def importyaml(connection,metadata,sourcePath,language='en'):
invGroups = Table('invGroups',metadata)
trnTranslations = Table('trnTranslations',metadata)
print "Importing Groups"
print "opening Yaml"
with open(os.path.join(sourcePath,'fsd','groupIDs.yaml'),'r') as yamlstream:
trans = connection.begin()
groupids=load(yamlstream,Loader=SafeLoader)
print "Yaml Processed into memory"
for groupid in groupids:
connection.execute(invGroups.insert(),
groupID=groupid,
categoryID=groupids[groupid].get('categoryID',0),
groupName=groupids[groupid].get('name',{}).get(language,'').decode('utf-8'),
iconID=groupids[groupid].get('iconID'),
useBasePrice=groupids[groupid].get('useBasePrice'),
anchored=groupids[groupid].get('anchored',0),
anchorable=groupids[groupid].get('anchorable',0),
fittableNonSingleton=groupids[groupid].get('fittableNonSingleton',0),
published=groupids[groupid].get('published',0))
if (groupids[groupid].has_key('name')):
for lang in groupids[groupid]['name']:
connection.execute(trnTranslations.insert(),tcID=7,keyID=groupid,languageID=lang,text=groupids[groupid]['name'][lang].decode('utf-8'));
trans.commit()
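# Usage sketch (illustrative, not part of the original file): the caller is
# expected to supply a connected engine and a MetaData holding the table
# definitions; the connection string and SDE path below are assumptions.
#
# engine = create_engine('mysql://user:password@localhost/eve')
# metadata = MetaData(bind=engine)
# importyaml(engine.connect(), metadata, '/path/to/sde', language='en')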
| mit | -532,967,351,756,892,740 | 45.179487 | 155 | 0.616324 | false |
gonzolino/heat | heat/db/sqlalchemy/migrate_repo/versions/047_stack_nested_depth.py | 13 | 1602 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
def upgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
stack = sqlalchemy.Table('stack', meta, autoload=True)
nested_depth = sqlalchemy.Column(
'nested_depth', sqlalchemy.Integer(), default=0)
nested_depth.create(stack)
def get_stacks(owner_id):
stmt = stack.select().where(stack.c.owner_id == owner_id)
return migrate_engine.execute(stmt)
def set_nested_depth(st, nested_depth):
if st.backup:
return
values = {'nested_depth': nested_depth}
update = stack.update().where(
stack.c.id == st.id).values(values)
migrate_engine.execute(update)
# Recurse down the tree
child_stacks = get_stacks(owner_id=st.id)
child_nested_depth = nested_depth + 1
for ch in child_stacks:
set_nested_depth(ch, child_nested_depth)
# Iterate over all top-level non nested stacks
for st in get_stacks(owner_id=None):
set_nested_depth(st, 0)
| apache-2.0 | 5,128,205,200,936,451,000 | 34.6 | 78 | 0.663546 | false |
realsystem/CloudFerry | cloudferrylib/base/action/action.py | 11 | 1065 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudferrylib.scheduler import task
class Action(task.Task):
def __init__(self, init, cloud=None):
self.cloud = None
self.src_cloud = None
self.dst_cloud = None
self.cfg = None
self.__dict__.update(init)
self.init = init
if cloud:
self.cloud = init[cloud]
super(Action, self).__init__()
def run(self, **kwargs):
pass
def save(self):
pass
def restore(self):
pass
| apache-2.0 | -73,018,738,342,877,380 | 26.307692 | 70 | 0.660094 | false |
oberstet/crossbarexamples | rest/needs_cleanup/python/lib/crossbarconnect/client.py | 9 | 8266 | ###############################################################################
##
## Copyright (C) 2012-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__ = ['Client']
try:
import ssl
_HAS_SSL = True
except ImportError:
_HAS_SSL = False
import sys
_HAS_SSL_CLIENT_CONTEXT = sys.version_info >= (2,7,9)
import json
import hmac
import hashlib
import base64
import random
from datetime import datetime
import six
from six.moves.urllib import parse
from six.moves.http_client import HTTPConnection, HTTPSConnection
def _utcnow():
"""
Get current time in UTC as ISO 8601 string.
:returns str -- Current time as string in ISO 8601 format.
"""
now = datetime.utcnow()
return now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
def _parse_url(url):
"""
Parses a Crossbar.io HTTP bridge URL.
"""
parsed = parse.urlparse(url)
if parsed.scheme not in ["http", "https"]:
raise Exception("invalid Push URL scheme '%s'" % parsed.scheme)
if parsed.port is None or parsed.port == "":
if parsed.scheme == "http":
port = 80
elif parsed.scheme == "https":
port = 443
else:
raise Exception("logic error")
else:
port = int(parsed.port)
if parsed.fragment is not None and parsed.fragment != "":
raise Exception("invalid Push URL: non-empty fragment '%s" % parsed.fragment)
if parsed.query is not None and parsed.query != "":
raise Exception("invalid Push URL: non-empty query string '%s" % parsed.query)
if parsed.path is not None and parsed.path != "":
ppath = parsed.path
path = parse.unquote(ppath)
else:
ppath = "/"
path = ppath
return {'secure': parsed.scheme == "https",
'host': parsed.hostname,
'port': port,
'path': path}
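# For example (illustrative): _parse_url("https://example.com:8443/push")
# returns {'secure': True, 'host': 'example.com', 'port': 8443, 'path': '/push'}.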
class Client:
"""
Crossbar.io HTTP bridge client.
"""
def __init__(self, url, key = None, secret = None, timeout = 5, context = None):
"""
Create a new Crossbar.io push client.
The only mandatory argument is the Push service endpoint of the Crossbar.io
instance to push to.
For signed pushes, provide authentication key and secret. If those are not
given, unsigned pushes are performed.
:param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push).
:type url: str
:param key: Optional key to use for signing requests.
:type key: str
:param secret: When using signed request, the secret corresponding to key.
:type secret: str
:param timeout: Timeout for requests.
:type timeout: int
:param context: If the HTTP bridge is running on HTTPS (that is securely over TLS),
then the context provides the SSL settings the client should use (e.g. the
certificate chain against which to verify the server certificate). This parameter
is only available on Python 2.7.9+ and Python 3 (otherwise the parameter is silently
ignored!). See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext
:type context: obj or None
"""
if six.PY2:
if type(url) == str:
url = six.u(url)
if type(key) == str:
key = six.u(key)
if type(secret) == str:
secret = six.u(secret)
assert(type(url) == six.text_type)
assert((key and secret) or (not key and not secret))
assert(key is None or type(key) == six.text_type)
assert(secret is None or type(secret) == six.text_type)
assert(type(timeout) == int)
if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT:
assert(context is None or isinstance(context, ssl.SSLContext))
self._seq = 1
self._key = key
self._secret = secret
self._endpoint = _parse_url(url)
self._endpoint['headers'] = {
"Content-type": "application/json",
"User-agent": "crossbarconnect-python"
}
if self._endpoint['secure']:
if not _HAS_SSL:
raise Exception("Bridge URL is using HTTPS, but Python SSL module is missing")
if _HAS_SSL_CLIENT_CONTEXT:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout, context = context)
else:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
else:
self._connection = HTTPConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
def publish(self, topic, *args, **kwargs):
"""
Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge.
The event payload (positional and keyword) can be of any type that can be
serialized to JSON.
If `kwargs` contains an `options` attribute, this is expected to
be a dictionary with the following possible parameters:
* `exclude`: A list of WAMP session IDs to exclude from receivers.
* `eligible`: A list of WAMP session IDs eligible as receivers.
:param topic: Topic to push to.
:type topic: str
:param args: Arbitrary application payload for the event (positional arguments).
:type args: list
:param kwargs: Arbitrary application payload for the event (keyword arguments).
:type kwargs: dict
:returns int -- The event publication ID assigned by the broker.
"""
if six.PY2 and type(topic) == str:
topic = six.u(topic)
assert(type(topic) == six.text_type)
## this will get filled and later serialized into HTTP/POST body
##
event = {
'topic': topic
}
if 'options' in kwargs:
event['options'] = kwargs.pop('options')
assert(type(event['options']) == dict)
if args:
event['args'] = args
if kwargs:
event['kwargs'] = kwargs
try:
body = json.dumps(event, separators = (',',':'))
if six.PY3:
body = body.encode('utf8')
except Exception as e:
raise Exception("invalid event payload - not JSON serializable: {0}".format(e))
params = {
'timestamp': _utcnow(),
'seq': self._seq,
}
if self._key:
## if the request is to be signed, create extra fields and signature
params['key'] = self._key
params['nonce'] = random.randint(0, 9007199254740992)
# HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256)
hm.update(params['key'].encode('utf8'))
hm.update(params['timestamp'].encode('utf8'))
hm.update(u"{0}".format(params['seq']).encode('utf8'))
hm.update(u"{0}".format(params['nonce']).encode('utf8'))
hm.update(body)
signature = base64.urlsafe_b64encode(hm.digest())
params['signature'] = signature
self._seq += 1
path = "{0}?{1}".format(parse.quote(self._endpoint['path']), parse.urlencode(params))
## now issue the HTTP/POST
##
self._connection.request('POST', path, body, self._endpoint['headers'])
response = self._connection.getresponse()
response_body = response.read()
if response.status != 202:
raise Exception("publication request failed {0} [{1}] - {2}".format(response.status, response.reason, response_body))
try:
res = json.loads(response_body)
except Exception as e:
raise Exception("publication request bogus result - {0}".format(e))
return res['id']
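# Usage sketch (illustrative, not part of the original module): publishing a
# signed event; the URL, key and secret values below are assumptions.
#
#   client = Client("http://127.0.0.1:8080/push", key="key1", secret="secret1")
#   event_id = client.publish("com.myapp.topic1", "hello",
#                             options={'exclude': [1234]})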
| apache-2.0 | 6,130,401,927,103,038,000 | 32.064 | 126 | 0.604767 | false |
CiscoSystems/nova | nova/tests/api/openstack/compute/contrib/test_snapshots.py | 30 | 8037 | # Copyright 2011 Denali Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import webob
from nova.api.openstack.compute.contrib import volumes
from nova import context
from nova.openstack.common import jsonutils
from nova.openstack.common import timeutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.volume import cinder
class SnapshotApiTest(test.NoDBTestCase):
def setUp(self):
super(SnapshotApiTest, self).setUp()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
self.stubs.Set(cinder.API, "create_snapshot",
fakes.stub_snapshot_create)
self.stubs.Set(cinder.API, "create_snapshot_force",
fakes.stub_snapshot_create)
self.stubs.Set(cinder.API, "delete_snapshot",
fakes.stub_snapshot_delete)
self.stubs.Set(cinder.API, "get_snapshot", fakes.stub_snapshot_get)
self.stubs.Set(cinder.API, "get_all_snapshots",
fakes.stub_snapshot_get_all)
self.stubs.Set(cinder.API, "get", fakes.stub_volume_get)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Volumes'])
self.context = context.get_admin_context()
self.app = fakes.wsgi_app(init_only=('os-snapshots',))
def test_snapshot_create(self):
snapshot = {"volume_id": 12,
"force": False,
"display_name": "Snapshot Test Name",
"display_description": "Snapshot Test Desc"}
body = dict(snapshot=snapshot)
req = webob.Request.blank('/v2/fake/os-snapshots')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
resp_dict = jsonutils.loads(resp.body)
self.assertIn('snapshot', resp_dict)
self.assertEqual(resp_dict['snapshot']['displayName'],
snapshot['display_name'])
self.assertEqual(resp_dict['snapshot']['displayDescription'],
snapshot['display_description'])
self.assertEqual(resp_dict['snapshot']['volumeId'],
snapshot['volume_id'])
def test_snapshot_create_force(self):
snapshot = {"volume_id": 12,
"force": True,
"display_name": "Snapshot Test Name",
"display_description": "Snapshot Test Desc"}
body = dict(snapshot=snapshot)
req = webob.Request.blank('/v2/fake/os-snapshots')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
resp_dict = jsonutils.loads(resp.body)
self.assertIn('snapshot', resp_dict)
self.assertEqual(resp_dict['snapshot']['displayName'],
snapshot['display_name'])
self.assertEqual(resp_dict['snapshot']['displayDescription'],
snapshot['display_description'])
self.assertEqual(resp_dict['snapshot']['volumeId'],
snapshot['volume_id'])
        # Test invalid force parameter
snapshot = {"volume_id": 12,
"force": '**&&^^%%$$##@@'}
body = dict(snapshot=snapshot)
req = webob.Request.blank('/v2/fake/os-snapshots')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
def test_snapshot_delete(self):
snapshot_id = 123
req = webob.Request.blank('/v2/fake/os-snapshots/%d' % snapshot_id)
req.method = 'DELETE'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 202)
def test_snapshot_delete_invalid_id(self):
snapshot_id = -1
req = webob.Request.blank('/v2/fake/os-snapshots/%d' % snapshot_id)
req.method = 'DELETE'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404)
def test_snapshot_show(self):
snapshot_id = 123
req = webob.Request.blank('/v2/fake/os-snapshots/%d' % snapshot_id)
req.method = 'GET'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
resp_dict = jsonutils.loads(resp.body)
self.assertIn('snapshot', resp_dict)
self.assertEqual(resp_dict['snapshot']['id'], str(snapshot_id))
def test_snapshot_show_invalid_id(self):
snapshot_id = -1
req = webob.Request.blank('/v2/fake/os-snapshots/%d' % snapshot_id)
req.method = 'GET'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404)
def test_snapshot_detail(self):
req = webob.Request.blank('/v2/fake/os-snapshots/detail')
req.method = 'GET'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
resp_dict = jsonutils.loads(resp.body)
self.assertIn('snapshots', resp_dict)
resp_snapshots = resp_dict['snapshots']
self.assertEqual(len(resp_snapshots), 3)
resp_snapshot = resp_snapshots.pop()
self.assertEqual(resp_snapshot['id'], 102)
class SnapshotSerializerTest(test.NoDBTestCase):
def _verify_snapshot(self, snap, tree):
self.assertEqual(tree.tag, 'snapshot')
for attr in ('id', 'status', 'size', 'createdAt',
'displayName', 'displayDescription', 'volumeId'):
self.assertEqual(str(snap[attr]), tree.get(attr))
def test_snapshot_show_create_serializer(self):
serializer = volumes.SnapshotTemplate()
raw_snapshot = dict(
id='snap_id',
status='snap_status',
size=1024,
createdAt=timeutils.utcnow(),
displayName='snap_name',
displayDescription='snap_desc',
volumeId='vol_id',
)
text = serializer.serialize(dict(snapshot=raw_snapshot))
tree = etree.fromstring(text)
self._verify_snapshot(raw_snapshot, tree)
def test_snapshot_index_detail_serializer(self):
serializer = volumes.SnapshotsTemplate()
raw_snapshots = [dict(
id='snap1_id',
status='snap1_status',
size=1024,
createdAt=timeutils.utcnow(),
displayName='snap1_name',
displayDescription='snap1_desc',
volumeId='vol1_id',
),
dict(
id='snap2_id',
status='snap2_status',
size=1024,
createdAt=timeutils.utcnow(),
displayName='snap2_name',
displayDescription='snap2_desc',
volumeId='vol2_id',
)]
text = serializer.serialize(dict(snapshots=raw_snapshots))
tree = etree.fromstring(text)
self.assertEqual('snapshots', tree.tag)
self.assertEqual(len(raw_snapshots), len(tree))
for idx, child in enumerate(tree):
self._verify_snapshot(raw_snapshots[idx], child)
| apache-2.0 | 3,658,086,337,553,203,000 | 37.454545 | 78 | 0.598358 | false |
diagramsoftware/odoomrp-utils | product_uom_change_fix/models/product.py | 13 | 1920 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.multi
def write(self, vals):
res = {}
for product_tmpl in self:
write_vals = {}
if 'uom_po_id' in vals:
write_vals['uom_po_id'] = vals.pop("uom_po_id", None)
write_vals['uom_id'] = vals.pop("uom_id", None)
if vals:
res = super(ProductTemplate, self).write(vals)
if write_vals:
product_obj = self.env['product.product']
st_mv_obj = self.env['stock.move']
product_lst = product_obj.search([('product_tmpl_id', '=',
product_tmpl.id)])
if not st_mv_obj.search([('product_id', 'in',
product_lst.ids)]):
models.Model.write(self, write_vals)
else:
res = super(ProductTemplate, self).write(write_vals)
return res
| agpl-3.0 | 8,803,350,959,548,142,000 | 40.73913 | 78 | 0.521875 | false |
noba3/KoTos | addons/script.module.urlresolver/lib/urlresolver/plugins/watchfreeinhd.py | 4 | 2097 | '''
watchfreeinhd urlresolver plugin
Copyright (C) 2013 voinage
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from t0mm0.common.net import Net
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
from urlresolver import common
class WatchFreeResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "watchfreeinhd"
domains = ["watchfreeinhd.com"]
def __init__(self):
p = self.get_setting('priority') or 100
self.priority = int(p)
self.net = Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
html = self.net.http_POST(web_url, {'agree': 'Yes, let me watch'}).content
link = re.findall('<a href="(.+?)" id="player" name="player">', html)
if link:
return link[0]
else:
raise UrlResolver.ResolverError('File Not Found or removed')
def get_url(self, host, media_id):
return 'http://www.%s.com/%s' % (host, media_id)
def get_host_and_id(self, url):
r = re.match(r'http://www.(watchfreeinhd).com/([0-9A-Za-z]+)', url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
if self.get_setting('enabled') == 'false': return False
return (re.match(r'http://www.(watchfreeinhd).com/([0-9A-Za-z]+)', url) or 'watchfree' in host)
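# Usage sketch (illustrative): how the resolver chain would drive this plugin.
#
#   resolver = WatchFreeResolver()
#   host, media_id = resolver.get_host_and_id('http://www.watchfreeinhd.com/abc123')
#   stream_url = resolver.get_media_url(host, media_id)  # may raise ResolverError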
| gpl-2.0 | 5,861,438,281,677,494,000 | 35.789474 | 103 | 0.677635 | false |
OldHuntsman/DefilerWings | game/pythoncode/girls.py | 1 | 16984 | # coding=utf-8
import random
import data
import renpy.exports as renpy
import renpy.store as store
import girls_data
from treasures import gen_treas
from utils import call
from characters import Girl
from data import achieve_target
class GirlsList(object):
def __init__(self, game_ref, base_character):
self.game = game_ref
self.character = base_character
        self.prisoners = []  # list of imprisoned girls
        self.free_list = []  # list of girls set free
        self.spawn = []  # list of spawn that arrive after awakening
        self.active = 0  # index of the current girl
        self.offspring = []  # offspring types recorded for the quest
def new_girl(self, girl_type='peasant'):
"""
Генерация новой девушки указанного типа.
"""
self.game.girl = Girl(game_ref=self.game, girl_type=girl_type)
self.game.girl.treasure = self.gen_tres()
return self.description('new')
def gen_tres(self):
"""
Создание списка индивидуальных сокровищ для текущей девушки
"""
        g_type = self.game.girl.type  # shorthand for the girl's type
        girl_info = girls_data.girls_info[g_type]  # shorthand for this girl type's data
count = random.randint(girl_info['t_count_min'], girl_info['t_count_max'])
t_list = girl_info['t_list']
alignment = girl_info['t_alignment']
min_cost = girl_info['t_price_min']
max_cost = girl_info['t_price_max']
obtained = u"Принадлежало красавице по имени %s" % self.game.girl.name
return gen_treas(count, t_list, alignment, min_cost, max_cost, obtained)
def impregnate(self):
"""
Осеменение женщины.
"""
# self.description('prelude', True)
# self.description('sex', True)
# self.description('impregnate', True)
self.game.girl.virgin = False
if self.game.girl.quality < self.game.dragon.magic or \
'impregnator' in self.game.dragon.modifiers():
self.game.girl.pregnant = 2
else:
self.game.girl.pregnant = 1
self.game.dragon.lust -= 1
achieve_target(self.game.girl.type, "impregnate")
return self.description('shout')
def free_girl(self):
"""
Выпустить текущую девушку на свободу.
"""
# девушка отслеживается только если беременна
if self.game.girl.pregnant:
self.free_list.append(self.game.girl)
if self.game.girl.jailed:
return self.description('free_prison')
else:
return self.description('free')
def free_all_girls(self):
"""
Выпустить на свободу всех девушек.
"""
for girl_i in reversed(xrange(self.prisoners_count)):
self.game.girl = self.prisoners[girl_i]
if self.game.girl.pregnant:
self.free_list.append(self.game.girl)
self.prisoners = []
def steal_girl(self):
return self.description('steal')
def jail_girl(self):
"""
Посадить текущую девушку за решетку.
"""
if self.game.girl.jailed:
text = self.description('jailed')
self.prisoners.insert(self.active, self.game.girl)
else:
text = self.description('jail')
self.game.girl.jailed = True
self.prisoners.append(self.game.girl)
return text
def set_active(self, index):
"""
Достать девушку с номером index из темницы
"""
self.game.girl = self.prisoners[index]
self.active = index
del self.prisoners[index]
def eat_girl(self):
"""
Скушать девушку.
"""
self.game.dragon.hunger -= 1
if self.game.dragon.lust < 3:
self.game.dragon.lust += 1
self.game.dragon.bloodiness = 0
return self.description('eat')
def rob_girl(self):
"""
Ограбить девушку.
"""
self.game.lair.treasury.receive_treasures(self.game.girl.treasure)
return self.description('rob')
def prisoners_list(self):
"""
Возвращает список плененных девушек.
"""
jail_list = []
for girl_i in xrange(len(self.prisoners)):
jail_list.append(self.prisoners[girl_i].name)
return jail_list
@property
def prisoners_count(self):
"""
Возвращает количество плененных девушек.
"""
return len(self.prisoners)
def description(self, status, say=False):
"""
Генерация описания ситуации для текущей девушки (self.game.girl).
status - кодовое описание ситуации
say - если истина - описание выводится сразу на экран
Возвращается текст описания или None, если текст в списке не найден
"""
format_dict = {
'dragon_name': self.game.dragon.name,
'dragon_name_full': self.game.dragon.fullname,
'dragon_type': self.game.dragon.kind,
'girl_name': self.game.girl.name,
'girl_title': girls_data.girls_info[self.game.girl.type]['description'],
}
girl_type = self.game.girl.type
if girl_type not in girls_data.girls_texts or status not in girls_data.girls_texts[girl_type]:
girl_type = 'girl'
if status in girls_data.girls_texts[girl_type]:
text = random.choice(girls_data.girls_texts[girl_type][status])
if self.spawn:
                # If the spawn list is not empty, take the last one's name for possible substitution
format_dict['spawn_name'] = girls_data.spawn_info[self.spawn[-1]]['born'].capitalize()
if status == 'rob':
treas_description = self.game.lair.treasury.treasures_description(self.game.girl.treasure)
treas_description = '\n'.join(treas_description) + u'.'
self.game.girl.treasure = []
format_dict['rob_list'] = treas_description
text = text % format_dict
else:
text = None
if say and text:
            self.game.girl.third(text)  # print the message
            store.nvl_list = []  # Python-side equivalent of "nvl clear"
else:
return text
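    # Usage sketch (illustrative): description('eat', say=True) prints a random
    # 'eat' line for the current girl, while description('eat') returns the text
    # instead of printing it.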
@staticmethod
def event(event_type, *args, **kwargs):
if event_type in girls_data.girl_events:
if girls_data.girl_events[event_type] is not None:
call(girls_data.girl_events[event_type], *args, **kwargs)
else:
raise Exception("Unknown event: %s" % event_type)
return
def next_year(self):
"""
Все действия с девушками за год.
"""
        # imprisoned girls
for girl_i in reversed(xrange(self.prisoners_count)):
self.game.girl = self.prisoners[girl_i]
            # escape attempt
if (random.randint(1, 2) == 1) and self.game.lair.reachable([]) and \
'regular_guards' not in self.game.lair.upgrades and \
'elite_guards' not in self.game.lair.upgrades and \
'smuggler_guards' not in self.game.lair.upgrades:
                # The girl escaped from her cell
                del self.prisoners[girl_i]
                self.event('escape')  # "escape from captivity" event
if self.game.girl.pregnant:
self.free_list.append(self.game.girl)
else:
                # the girl did not escape
if ('servant' in self.game.lair.upgrades) or ('gremlin_servant' in self.game.lair.upgrades):
if self.game.girl.pregnant:
girl_type = girls_data.girls_info[self.game.girl.type]
if self.game.girl.pregnant == 1:
spawn_class = 'regular_spawn'
else:
spawn_class = 'advanced_spawn'
if 'educated_spawn' not in self.offspring:
self.offspring.append('educated_spawn')
if girl_type['giantess']:
girl_size = 'giantess'
else:
girl_size = 'common_size'
if girl_size not in self.offspring:
self.offspring.append(girl_size)
self.spawn.append(girl_type[spawn_class])
                        self.event('spawn', girl_type[spawn_class])  # "spawn born" event
self.game.girl.pregnant = 0
else:
                    self.event('hunger_death')  # "girl starves to death" event
del self.prisoners[girl_i]
        # free girls, including those who just escaped; only pregnant ones are tracked
for girl_i in xrange(len(self.free_list)):
self.game.girl = self.free_list[girl_i]
if (random.randint(1, 3) == 1) and not girls_data.girls_info[self.game.girl.type]['giantess']:
                self.event('kill')  # "pregnant girl killed while free" event
else:
girl_type = girls_data.girls_info[self.game.girl.type]
if self.game.girl.pregnant == 1:
spawn_class = 'regular_spawn'
else:
spawn_class = 'advanced_spawn'
if 'free_spawn' not in self.offspring:
self.offspring.append('free_spawn')
if girl_type['giantess']:
girl_size = 'giantess'
else:
girl_size = 'common_size'
if girl_size not in self.offspring:
self.offspring.append(girl_size)
spawn_type = girls_data.girls_info[self.game.girl.type][spawn_class]
spawn = girls_data.spawn_info[spawn_type]
                self.event('free_spawn', spawn_type)  # "spawn born in the wild" event
self.free_spawn(spawn['power'])
        self.free_list = []  # clear the list - she either gave birth or was killed, so there is no point tracking her further
def before_sleep(self):
"""
Все действия до начала сна - смерть с тоски, может быть что-то еще?
"""
for girl_i in reversed(xrange(self.prisoners_count)):
self.game.girl = self.prisoners[girl_i]
if (not self.game.girl.virgin) and (not self.game.girl.pregnant):
                self.description('anguish', True)  # she dies of anguish
del self.prisoners[girl_i]
# noinspection PyTypeChecker
def after_awakening(self):
"""
Все действия после пробуждения - разбираемся с воспитанными отродьями.
"""
for spawn_i in xrange(len(self.spawn)):
            spawn_type = self.spawn[spawn_i]  # shorthand for the spawn type
            spawn = girls_data.spawn_info[spawn_type]  # shorthand for the spawn data
renpy.show("meow", what=store.Image("img/scene/spawn/%s.jpg" % spawn_type))
            spawn_mod = spawn['modifier']  # shorthand for the spawn's modifier list
            # Check: True if the spawn is not marine, or it is marine and the lair is underwater
            # TODO: perhaps a servant should die if it is not marine but the lair is marine.
marine_check = ('marine' not in spawn_mod) or \
(self.game.lair.type.require and 'swimming' in self.game.lair.type.require)
            spawn_menu = [(u"К Вам приходит %s и просит назначения" % spawn['name'], None)]  # spawn menu
            # Possible menu items
if ('poisonous' in spawn_mod) and ('poison_guards' not in self.game.lair.upgrades) and marine_check:
spawn_menu.append((u"Выпустить в логово", u'poison_guards'))
if ('servant' in spawn_mod) and ('servant' not in self.game.lair.upgrades) and marine_check:
spawn_menu.append((u"Сделать слугой", 'servant'))
if ('warrior' in spawn_mod) and ('regular_guards' not in self.game.lair.upgrades) and marine_check:
spawn_menu.append((u"Сделать охранником", 'regular_guards'))
if ('elite' in spawn_mod) and ('elite_guards' not in self.game.lair.upgrades) and marine_check:
spawn_menu.append((u"Сделать элитным охранником", 'elite_guards'))
spawn_menu.append((u"Выпустить в королевство", 'free'))
if (('servant' in spawn_mod) or
('warrior' in spawn_mod) or
('elite' in spawn_mod)) and \
('marine' not in spawn_mod):
spawn_menu.append((u"Отправить в армию тьмы", 'army_of_darkness'))
menu_action = renpy.display_menu(spawn_menu)
if menu_action == 'free':
renpy.say(self.game.narrator, u"%s отправляется бесчинствовать в королевстве." % spawn['name'])
self.free_spawn(spawn['power'])
elif menu_action == 'army_of_darkness':
renpy.say(self.game.narrator, u"%s отправляется в армию тьмы." % spawn['name'])
self.army_of_darkness(spawn_type)
else:
                # announce that the spawn starts its duties
renpy.say(self.game.narrator, u"%s приступает к выполнению обязанностей." % spawn['name'])
                # announce the end of the replaced service, if necessary
if 'replaces' in data.lair_upgrades[menu_action].keys():
replace = data.lair_upgrades[menu_action]['replaces']
renpy.say(self.game.narrator,
u"%s больше не требуются и уходят." % data.lair_upgrades[replace]['name'])
                # add the upgrade to the lair
self.game.lair.add_upgrade(menu_action)
renpy.hide("meow")
self.spawn = []
def free_spawn(self, power):
"""
Действия отродий на свободе
"""
# Растёт разруха. Надо проверить чтобы это срабатывало по одному разу на тип отродий.
self.game.poverty.value += 1
pass
def army_of_darkness(self, warrior_type):
"""
Отправка в армию тьмы
"""
self.game.army.add_warrior(warrior_type)
@property
def is_mating_possible(self):
"""
Возвращает возможность совокупления - истину или ложь.
# TODO: проверка на превращение в человека
"""
assert self.game.girl, "Girl not found"
mating_possible = self.game.girl.virgin and self.game.dragon.lust > 0
if girls_data.girls_info[self.game.girl.type]['giantess']:
mating_possible = self.game.dragon.size > 3 and mating_possible
return mating_possible | bsd-3-clause | 8,058,773,759,381,603,000 | 42.362573 | 112 | 0.570504 | false |
Reddine/dzlibs | tweeza/users/views.py | 9 | 2145 | from flask import (Blueprint, render_template, flash, request, redirect,
url_for)
from flask.ext.login import login_required, current_user
from users.models import User
from items.models import Item
from users.forms import EditProfileForm
from flask.ext.babel import gettext as _
users = Blueprint('users', __name__, url_prefix='/profile')
@users.route('/')
@login_required
def index():
items_count = Item.objects(submitter=current_user.id).count()
return render_template('users/user_profile.html', user=current_user,
items_count=items_count)
@users.route('/edit/', methods=['GET', 'POST'])
@login_required
def edit():
form = EditProfileForm()
if request.method == 'POST':
if form.validate_on_submit():
user = User.objects.get(id=current_user.id)
user.name = form.name.data.strip()
user.email = form.email.data.strip()
user.website = form.website.data.strip()
user.twitter_username = form.twitter.data.strip('@')
facebook = form.facebook.data.strip().strip('/').split('/')[-1]
user.facebook_username = facebook
user.location = form.location.data.strip()
user.hireable = bool(form.hireable.data)
user.bio = form.bio.data.strip()
user.save()
flash(_('Profile updated successfully'), category='success')
return redirect(url_for('users.index'))
else:
flash(_('Error happened, see below'), category='alert')
return render_template('users/edit_profile.html', form=form)
else:
form.hireable.default = int(bool(current_user.hireable))
form.bio.default = current_user.bio or ''
form.process()
return render_template('users/edit_profile.html', form=form)
@users.route('/user/<int:id>')
def user_profile(id):
user = User.objects.get_or_404(user_id=id)
items_count = Item.objects(submitter=user).count()
return render_template('users/user_profile.html',
user=user,
items_count=items_count)
| mpl-2.0 | 2,603,199,521,902,704,600 | 35.355932 | 75 | 0.618182 | false |
hujiajie/pa-chromium | tools/git/for-all-touched-files.py | 130 | 3879 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invokes the specified (quoted) command for all files modified
between the current git branch and the specified branch or commit.
The special token [[FILENAME]] (or whatever you choose using the -t
flag) is replaced with each of the filenames of new or modified files.
Deleted files are not included. Neither are untracked files.
Synopsis:
%prog [-b BRANCH] [-d] [-x EXTENSIONS|-c] [-t TOKEN] QUOTED_COMMAND
Examples:
%prog -x gyp,gypi "tools/format_xml.py [[FILENAME]]"
%prog -c "tools/sort-headers.py [[FILENAME]]"
%prog -t "~~BINGO~~" "echo I modified ~~BINGO~~"
"""
import optparse
import os
import subprocess
import sys
# List of C++-like source file extensions.
_CPP_EXTENSIONS = ('h', 'hh', 'hpp', 'c', 'cc', 'cpp', 'cxx', 'mm',)
def GitShell(args, ignore_return=False):
"""A shell invocation suitable for communicating with git. Returns
output as list of lines, raises exception on error.
"""
job = subprocess.Popen(args,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(out, err) = job.communicate()
if job.returncode != 0 and not ignore_return:
print out
raise Exception("Error %d running command %s" % (
job.returncode, args))
return out.split('\n')
def FilenamesFromGit(branch_name, extensions):
"""Provides a list of all new and modified files listed by [git diff
branch_name] where branch_name can be blank to get a diff of the
workspace.
Excludes deleted files.
If extensions is not an empty list, include only files with one of
the extensions on the list.
"""
lines = GitShell('git diff --stat=600,500 %s' % branch_name)
filenames = []
for line in lines:
line = line.lstrip()
# Avoid summary line, and files that have been deleted (no plus).
if line.find('|') != -1 and line.find('+') != -1:
filename = line.split()[0]
if filename:
filename = filename.rstrip()
ext = filename.rsplit('.')[-1]
if not extensions or ext in extensions:
filenames.append(filename)
return filenames
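# For example (illustrative), a --stat line such as
#   "tools/format_xml.py    |   10 +++++-----"
# yields the filename "tools/format_xml.py", which is kept when extensions
# is empty or contains "py".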
def ForAllTouchedFiles(branch_name, extensions, token, command):
"""For each new or modified file output by [git diff branch_name],
run command with token replaced with the filename. If extensions is
not empty, do this only for files with one of the extensions in that
list.
"""
filenames = FilenamesFromGit(branch_name, extensions)
for filename in filenames:
os.system(command.replace(token, filename))
def main():
parser = optparse.OptionParser(usage=__doc__)
parser.add_option('-x', '--extensions', default='', dest='extensions',
help='Limits to files with given extensions '
'(comma-separated).')
parser.add_option('-c', '--cpp', default=False, action='store_true',
dest='cpp_only',
help='Runs your command only on C++-like source files.')
parser.add_option('-t', '--token', default='[[FILENAME]]', dest='token',
help='Sets the token to be replaced for each file '
'in your command (default [[FILENAME]]).')
parser.add_option('-b', '--branch', default='origin/master', dest='branch',
help='Sets what to diff to (default origin/master). Set '
'to empty to diff workspace against HEAD.')
opts, args = parser.parse_args()
if not args:
parser.print_help()
sys.exit(1)
extensions = opts.extensions
if opts.cpp_only:
extensions = _CPP_EXTENSIONS
ForAllTouchedFiles(opts.branch, extensions, opts.token, args[0])
if __name__ == '__main__':
main()
| bsd-3-clause | 8,868,940,145,502,394,000 | 33.026316 | 77 | 0.644754 | false |
asampat3090/readthedocs.org | readthedocs/restapi/urls.py | 4 | 1407 | from django.conf.urls import url, patterns, include
from rest_framework import routers
from .views.model_views import BuildViewSet, ProjectViewSet, NotificationViewSet, VersionViewSet
from readthedocs.comments.views import CommentViewSet
router = routers.DefaultRouter()
router.register(r'build', BuildViewSet)
router.register(r'version', VersionViewSet)
router.register(r'project', ProjectViewSet)
router.register(r'notification', NotificationViewSet)
router.register(r'comments', CommentViewSet, base_name="comments")
urlpatterns = patterns(
'',
url(r'^', include(router.urls)),
url(r'embed/', 'readthedocs.restapi.views.core_views.embed', name='embed'),
url(r'docurl/', 'readthedocs.restapi.views.core_views.docurl', name='docurl'),
url(r'cname/', 'readthedocs.restapi.views.core_views.cname', name='cname'),
url(r'footer_html/', 'readthedocs.restapi.views.footer_views.footer_html', name='footer_html'),
url(r'index_search/',
'readthedocs.restapi.views.search_views.index_search',
name='index_search'),
url(r'search/$', 'readthedocs.restapi.views.search_views.search', name='api_search'),
url(r'search/project/$',
'readthedocs.restapi.views.search_views.project_search',
name='api_project_search'),
url(r'search/section/$',
'readthedocs.restapi.views.search_views.section_search',
name='api_section_search'),
)
| mit | 6,997,323,850,555,082,000 | 42.96875 | 99 | 0.724236 | false |
kustodian/ansible | test/units/module_utils/network/ftd/test_fdm_swagger_parser.py | 37 | 17322 | # Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import copy
import os
import unittest
from ansible.module_utils.network.ftd.common import HTTPMethod
from ansible.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerParser
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')
base = {
'basePath': "/api/fdm/v2",
'definitions': {"NetworkObject": {"type": "object",
"properties": {"version": {"type": "string"}, "name": {"type": "string"},
"description": {"type": "string"},
"subType": {"type": "object",
"$ref": "#/definitions/NetworkObjectType"},
"value": {"type": "string"},
"isSystemDefined": {"type": "boolean"},
"dnsResolution": {"type": "object",
"$ref": "#/definitions/FQDNDNSResolution"},
"id": {"type": "string"},
"type": {"type": "string", "default": "networkobject"}},
"required": ["subType", "type", "value", "name"]},
"NetworkObjectWrapper": {
"allOf": [{"$ref": "#/definitions/NetworkObject"}, {"$ref": "#/definitions/LinksWrapper"}]}
},
'paths': {
"/object/networks": {
"get": {"tags": ["NetworkObject"],
"operationId": "getNetworkObjectList",
"responses": {
"200": {
"description": "",
"schema": {"type": "object",
"title": "NetworkObjectList",
"properties": {
"items": {
"type": "array",
"items": {"$ref": "#/definitions/NetworkObjectWrapper"}},
"paging": {
"$ref": "#/definitions/Paging"}},
"required": ["items", "paging"]}}},
"parameters": [
{"name": "offset", "in": "query", "required": False, "type": "integer"},
{"name": "limit", "in": "query", "required": False, "type": "integer"},
{"name": "sort", "in": "query", "required": False, "type": "string"},
{"name": "filter", "in": "query", "required": False, "type": "string"}]},
"post": {"tags": ["NetworkObject"], "operationId": "addNetworkObject",
"responses": {
"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"422": {"description": "",
"schema": {"type": "object", "$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"in": "body", "name": "body",
"required": True,
"schema": {"$ref": "#/definitions/NetworkObject"}}]}
},
"/object/networks/{objId}": {
"get": {"tags": ["NetworkObject"], "operationId": "getNetworkObject",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"404": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"}]},
"put": {"tags": ["NetworkObject"], "operationId": "editNetworkObject",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"422": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"},
{"in": "body", "name": "body", "required": True,
"schema": {"$ref": "#/definitions/NetworkObject"}}]},
"delete": {"tags": ["NetworkObject"], "operationId": "deleteNetworkObject",
"responses": {"204": {"description": ""},
"422": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"}]}}}
}
def _get_objects(base_object, key_names):
return dict((_key, base_object[_key]) for _key in key_names)
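# For example (illustrative):
# _get_objects({'a': 1, 'b': 2, 'c': 3}, ['a', 'c']) == {'a': 1, 'c': 3}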
class TestFdmSwaggerParser(unittest.TestCase):
def test_simple_object(self):
self._data = copy.deepcopy(base)
self.fdm_data = FdmSwaggerParser().parse_spec(self._data)
expected_operations = {
'getNetworkObjectList': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {
'path': {},
'query': {
'offset': {
'required': False,
'type': 'integer'
},
'limit': {
'required': False,
'type': 'integer'
},
'sort': {
'required': False,
'type': 'string'
},
'filter': {
'required': False,
'type': 'string'
}
}
},
'returnMultipleItems': True,
"tags": ["NetworkObject"]
},
'addNetworkObject': {
'method': HTTPMethod.POST,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {'path': {},
'query': {}},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'getNetworkObject': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'editNetworkObject': {
'method': HTTPMethod.PUT,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'deleteNetworkObject': {
'method': HTTPMethod.DELETE,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
}
}
assert sorted(['NetworkObject', 'NetworkObjectWrapper']) == sorted(self.fdm_data['models'].keys())
assert expected_operations == self.fdm_data['operations']
assert {'NetworkObject': expected_operations} == self.fdm_data['model_operations']
def test_simple_object_with_documentation(self):
api_spec = copy.deepcopy(base)
docs = {
'definitions': {
'NetworkObject': {
'description': 'Description for Network Object',
'properties': {'name': 'Description for name field'}
}
},
'paths': {
'/object/networks': {
'get': {
'description': 'Description for getNetworkObjectList operation',
'parameters': [{'name': 'offset', 'description': 'Description for offset field'}]
},
'post': {'description': 'Description for addNetworkObject operation'}
}
}
}
self.fdm_data = FdmSwaggerParser().parse_spec(api_spec, docs)
assert 'Description for Network Object' == self.fdm_data['models']['NetworkObject']['description']
assert '' == self.fdm_data['models']['NetworkObjectWrapper']['description']
network_properties = self.fdm_data['models']['NetworkObject']['properties']
assert '' == network_properties['id']['description']
assert not network_properties['id']['required']
assert 'Description for name field' == network_properties['name']['description']
assert network_properties['name']['required']
ops = self.fdm_data['operations']
assert 'Description for getNetworkObjectList operation' == ops['getNetworkObjectList']['description']
assert 'Description for addNetworkObject operation' == ops['addNetworkObject']['description']
assert '' == ops['deleteNetworkObject']['description']
get_op_params = ops['getNetworkObjectList']['parameters']
assert 'Description for offset field' == get_op_params['query']['offset']['description']
assert '' == get_op_params['query']['limit']['description']
def test_model_operations_should_contain_all_operations(self):
data = {
'basePath': '/v2/',
'definitions': {
'Model1': {"type": "object"},
'Model2': {"type": "object"},
'Model3': {"type": "object"}
},
'paths': {
'path1': {
'get': {
'operationId': 'getSomeModelList',
"responses": {
"200": {"description": "",
"schema": {"type": "object",
"title": "NetworkObjectList",
"properties": {
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/Model1"
}
}
}}
}
}
},
"post": {
"operationId": "addSomeModel",
"parameters": [{"in": "body",
"name": "body",
"schema": {"$ref": "#/definitions/Model2"}
}]}
},
'path2/{id}': {
"get": {"operationId": "getSomeModel",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/Model3"}},
}
},
"put": {"operationId": "editSomeModel",
"parameters": [{"in": "body",
"name": "body",
"schema": {"$ref": "#/definitions/Model1"}}
]},
"delete": {
"operationId": "deleteModel3",
}},
'path3': {
"delete": {
"operationId": "deleteNoneModel",
}
}
}
}
expected_operations = {
'getSomeModelList': {
'method': HTTPMethod.GET,
'url': '/v2/path1',
'modelName': 'Model1',
'returnMultipleItems': True,
'tags': []
},
'addSomeModel': {
'method': HTTPMethod.POST,
'url': '/v2/path1',
'modelName': 'Model2',
'parameters': {
'path': {},
'query': {}
},
'returnMultipleItems': False,
'tags': []
},
'getSomeModel': {
'method': HTTPMethod.GET,
'url': '/v2/path2/{id}',
'modelName': 'Model3',
'returnMultipleItems': False,
'tags': []
},
'editSomeModel': {
'method': HTTPMethod.PUT,
'url': '/v2/path2/{id}',
'modelName': 'Model1',
'parameters': {
'path': {},
'query': {}
},
'returnMultipleItems': False,
'tags': []
},
'deleteModel3': {
'method': HTTPMethod.DELETE,
'url': '/v2/path2/{id}',
'modelName': 'Model3',
'returnMultipleItems': False,
'tags': []
},
'deleteNoneModel': {
'method': HTTPMethod.DELETE,
'url': '/v2/path3',
'modelName': None,
'returnMultipleItems': False,
'tags': []
}
}
fdm_data = FdmSwaggerParser().parse_spec(data)
assert sorted(['Model1', 'Model2', 'Model3']) == sorted(fdm_data['models'].keys())
assert expected_operations == fdm_data['operations']
assert {
'Model1': {
'getSomeModelList': expected_operations['getSomeModelList'],
'editSomeModel': expected_operations['editSomeModel'],
},
'Model2': {
'addSomeModel': expected_operations['addSomeModel']
},
'Model3': {
'getSomeModel': expected_operations['getSomeModel'],
'deleteModel3': expected_operations['deleteModel3']
},
None: {
'deleteNoneModel': expected_operations['deleteNoneModel']
}
} == fdm_data['model_operations']
| gpl-3.0 | -4,849,606,375,866,482,000 | 44.704485 | 115 | 0.381192 | false |
ebukoz/thrive | erpnext/maintenance/doctype/maintenance_visit/maintenance_visit.py | 11 | 3120 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from erpnext.utilities.transaction_base import TransactionBase
class MaintenanceVisit(TransactionBase):
def get_feed(self):
return _("To {0}").format(self.customer_name)
def validate_serial_no(self):
for d in self.get('purposes'):
if d.serial_no and not frappe.db.exists("Serial No", d.serial_no):
frappe.throw(_("Serial No {0} does not exist").format(d.serial_no))
def validate(self):
self.validate_serial_no()
def update_customer_issue(self, flag):
for d in self.get('purposes'):
if d.prevdoc_docname and d.prevdoc_doctype == 'Warranty Claim' :
if flag==1:
mntc_date = self.mntc_date
service_person = d.service_person
work_done = d.work_done
status = "Open"
if self.completion_status == 'Fully Completed':
status = 'Closed'
elif self.completion_status == 'Partially Completed':
status = 'Work In Progress'
else:
nm = frappe.db.sql("select t1.name, t1.mntc_date, t2.service_person, t2.work_done from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent = t1.name and t1.completion_status = 'Partially Completed' and t2.prevdoc_docname = %s and t1.name!=%s and t1.docstatus = 1 order by t1.name desc limit 1", (d.prevdoc_docname, self.name))
if nm:
status = 'Work In Progress'
mntc_date = nm and nm[0][1] or ''
service_person = nm and nm[0][2] or ''
work_done = nm and nm[0][3] or ''
else:
status = 'Open'
mntc_date = None
service_person = None
work_done = None
wc_doc = frappe.get_doc('Warranty Claim', d.prevdoc_docname)
wc_doc.update({
'resolution_date': mntc_date,
'resolved_by': service_person,
'resolution_details': work_done,
'status': status
})
wc_doc.db_update()
def check_if_last_visit(self):
"""check if last maintenance visit against same sales order/ Warranty Claim"""
check_for_docname = None
for d in self.get('purposes'):
if d.prevdoc_docname:
check_for_docname = d.prevdoc_docname
#check_for_doctype = d.prevdoc_doctype
if check_for_docname:
check = frappe.db.sql("select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent = t1.name and t1.name!=%s and t2.prevdoc_docname=%s and t1.docstatus = 1 and (t1.mntc_date > %s or (t1.mntc_date = %s and t1.mntc_time > %s))", (self.name, check_for_docname, self.mntc_date, self.mntc_date, self.mntc_time))
if check:
				check_lst = [x[0] for x in check]
				check_lst = ','.join(check_lst)
frappe.throw(_("Cancel Material Visits {0} before cancelling this Maintenance Visit").format(check_lst))
raise Exception
else:
self.update_customer_issue(0)
def on_submit(self):
self.update_customer_issue(1)
frappe.db.set(self, 'status', 'Submitted')
def on_cancel(self):
self.check_if_last_visit()
frappe.db.set(self, 'status', 'Cancelled')
def on_update(self):
pass
| gpl-3.0 | -2,751,693,802,528,694,300 | 35.27907 | 356 | 0.674359 | false |
joker946/nova | nova/tests/functional/v3/test_multiple_create.py | 30 | 2240 | # Copyright 2012 Nebula, Inc.
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.v3 import test_servers
from nova.tests.unit.image import fake
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class MultipleCreateJsonTest(test_servers.ServersSampleBase):
extension_name = "os-multiple-create"
_api_version = 'v2'
def _get_flags(self):
f = super(MultipleCreateJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.multiple_create.'
'Multiple_create')
return f
def test_multiple_create(self):
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
'min_count': "2",
'max_count': "3"
}
response = self._do_post('servers', 'multiple-create-post-req', subs)
subs.update(self._get_regexes())
self._verify_response('multiple-create-post-resp', subs, response, 202)
def test_multiple_create_without_reservation_id(self):
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
'min_count': "2",
'max_count': "3"
}
response = self._do_post('servers', 'multiple-create-no-resv-post-req',
subs)
subs.update(self._get_regexes())
self._verify_response('multiple-create-no-resv-post-resp', subs,
response, 202)
| apache-2.0 | 5,971,094,824,780,547,000 | 36.333333 | 79 | 0.620536 | false |
owattenmaker/PythonFighter | fighterV02.py | 1 | 14644 | '''
Worst fighting game ever
By: Owen Wattenmaker, Max Lambek
'''
#TODO
############################################################################################
### -add in frames for attacking, looks too choppy, need more pictures ###
### -fix yellow hit marker ###
### -fix framerate issues when loading background ###
### -add knockback ###
### - ###
### -walking animation, possibly this: http://www.pygame.org/project-GIFImage-1039-.html ###
############################################################################################
import pygame, sys, time, random, os
from pygame.locals import *
#from GIFImage import GIFImage
#change this to false to disable the background and vastly improve performance
draw_background = True
def spriteMask(sprite, player, state):
sprite.mask = pygame.mask.from_surface(player[state])
return sprite
def dplayer1():
if os.name == 'nt':
#right stationary
StationaryRight = pygame.transform.scale(pygame.image.load('character\player1\player1_right_stationary.png'), (350, 350))
#left stationary
StationaryLeft = pygame.transform.scale(pygame.image.load('character\player1\player1_left_stationary.png'), (350, 350))
#right punch
PunchRight = pygame.transform.scale(pygame.image.load('character\player1\player1_right_punch.png'), (350, 350))
#left punch
PunchLeft = pygame.transform.scale(pygame.image.load('character\player1\player1_left_punch.png'), (350, 350))
#right kick
KickRight = pygame.transform.scale(pygame.image.load('character\player1\player1_right_kick.png'), (350, 350))
#left kick
KickLeft = pygame.transform.scale(pygame.image.load('character\player1\player1_left_kick.png'), (350, 350))
else:
#right stationary
StationaryRight = pygame.transform.scale(pygame.image.load('character/player1/player1_right_stationary.png'), (350, 350))
#left stationary
StationaryLeft = pygame.transform.scale(pygame.image.load('character/player1/player1_left_stationary.png'), (350, 350))
#right punch
PunchRight = pygame.transform.scale(pygame.image.load('character/player1/player1_right_punch.png'), (350, 350))
#left punch
PunchLeft = pygame.transform.scale(pygame.image.load('character/player1/player1_left_punch.png'), (350, 350))
#right kick
KickRight = pygame.transform.scale(pygame.image.load('character/player1/player1_right_kick.png'), (350, 350))
#left kick
KickLeft = pygame.transform.scale(pygame.image.load('character/player1/player1_left_kick.png'), (350, 350))
player1 = {'right_stationary':StationaryRight, 'left_stationary':StationaryLeft, 'right_punch':PunchRight, 'left_punch':PunchLeft, 'right_kick':KickRight, 'left_kick':KickLeft}
return player1
def dplayer2():
if os.name == 'nt':
#right stationary
StationaryRight = pygame.transform.scale(pygame.image.load('character\player2\player2_right_stationary.png'), (350, 350))
#left stationary
StationaryLeft = pygame.transform.scale(pygame.image.load('character\player2\player2_left_stationary.png'), (350, 350))
#right punch
PunchRight = pygame.transform.scale(pygame.image.load('character\player2\player2_right_punch.png'), (350, 350))
#left punch
PunchLeft = pygame.transform.scale(pygame.image.load('character\player2\player2_left_punch.png'), (350, 350))
#right kick
KickRight = pygame.transform.scale(pygame.image.load('character\player2\player2_right_kick.png'), (350, 350))
#left kick
KickLeft = pygame.transform.scale(pygame.image.load('character\player2\player2_left_kick.png'), (350, 350))
else:
StationaryRight = pygame.transform.scale(pygame.image.load('character/player2/player2_right_stationary.png'), (350, 350))
#left stationary
StationaryLeft = pygame.transform.scale(pygame.image.load('character/player2/player2_left_stationary.png'), (350, 350))
#right punch
PunchRight = pygame.transform.scale(pygame.image.load('character/player2/player2_right_punch.png'), (350, 350))
#left punch
PunchLeft = pygame.transform.scale(pygame.image.load('character/player2/player2_left_punch.png'), (350, 350))
#right kick
KickRight = pygame.transform.scale(pygame.image.load('character/player2/player2_right_kick.png'), (350, 350))
#left kick
KickLeft = pygame.transform.scale(pygame.image.load('character/player2/player2_left_kick.png'), (350, 350))
player2 = {'right_stationary':StationaryRight, 'left_stationary':StationaryLeft, 'right_punch':PunchRight, 'left_punch':PunchLeft, 'right_kick':KickRight, 'left_kick':KickLeft}
return player2
def collision(sprite1, sprite2):
a = pygame.sprite.collide_mask(sprite1, sprite2)
return a
def movement(moveLeft, moveRight, player_left, player_right, MOVESPEED, WINDOWWIDTH):
    # move the player horizontally; only the right edge is recalculated here
    # because assigning to a pygame Rect's .right repositions the whole rect
    # while keeping its size
if moveLeft and player_left > -100:
player_right -= MOVESPEED
if moveRight and player_right < WINDOWWIDTH - 300:
player_right += MOVESPEED
return player_left, player_right
def jumping(player_top, airborn, verticalVelocity):
    # shared jump physics for both players: apply gravity while airborn and
    # land once the sprite returns to ground level (y = 360)
    if airborn:
        verticalVelocity += .7
        player_top += verticalVelocity
        if player_top >= 360:
            airborn = False
    return player_top, airborn, verticalVelocity
def score(hpplayer1, hpplayer2, punch1, kick1, punch2, kick2, hit):
if punch1:
hpplayer2 -= random.randint(23, 33)
hit = True
if kick1:
hpplayer2 -= random.randint(38, 45)
hit = True
if punch2:
hpplayer1 -= random.randint(23, 33)
hit = True
if kick2:
hpplayer1 -= random.randint(38, 45)
hit = True
return hpplayer1, hpplayer2, hit
def main():
# set up pygame
pygame.init()
font = pygame.font.SysFont("monospace", 72)
mainClock = pygame.time.Clock()
background = pygame.transform.scale(pygame.image.load('background.jpg'), (1300, 1300))
hit_background = pygame.transform.scale(pygame.image.load('flash_back.png'), (1300, 1300))
# set up the window
WINDOWWIDTH = 1280
WINDOWHEIGHT = 760
windowSurface = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT), 0, 32)
pygame.display.set_caption('Terrible Fighting Game')
rectplayer1 = pygame.Rect(1, 360, 5, 5)
rectplayer2 = pygame.Rect(600, 360, 5, 5)
splayer1 = pygame.sprite.Sprite()
splayer1.image = pygame.transform.scale(pygame.image.load('character/player1/player1_right_stationary.png'), (350, 350))
splayer1.rect = splayer1.image.get_rect()
splayer1.rect.topleft = [0, 350]
splayer2 = pygame.sprite.Sprite()
splayer2.image = pygame.transform.scale(pygame.image.load('character/player1/player1_right_stationary.png'), (350, 350))
splayer2.rect = splayer2.image.get_rect()
splayer2.rect.topleft = [450, 350]
#hit_effect = pygame.transform.scale(pygame.image.load('hit_effect.png'), (100, 100))
hit = False
collide = False
airborn1 = False
airborn2 = False
moveLeft1 = False
moveLeft2 = False
moveRight1 = False
moveRight2 = False
    MOVESPEED = 6
    # initialise attack flags before the game loop so the first collision
    # check cannot reference undefined names
    punch1 = punch2 = False
    kick1 = kick2 = False
orientation1 = 'right'
orientation2 = 'left'
state1 = 'right_stationary'
state2 = 'left_stationary'
verticalVelocity1 = 0
verticalVelocity2 = 0
hpplayer1 = 500
hpplayer2 = 500
player1 = dplayer1()
player2 = dplayer2()
gameStart = False
while not gameStart:
windowSurface.blit(background,(0,-450))
windowSurface.blit(hit_background, [0,0])
pressenter = font.render('<Press enter to start>', 1, (255, 255, 0))
windowSurface.blit(pressenter, [150, 400])
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pressed_keys = pygame.key.get_pressed()
if pressed_keys[K_KP_ENTER] or pressed_keys[K_RETURN]:
gameStart = True
pygame.display.update()
while True:
while hpplayer1 > 0 and hpplayer2 > 0:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pressed_keys = pygame.key.get_pressed()
#player1
if pressed_keys[K_a]:
moveLeft1 = True
moveRight1 = False
orientation1 = 'left'
state1 = 'left_stationary'
if pressed_keys[K_d]:
moveLeft1 = False
moveRight1 = True
orientation1 = 'right'
state1 = 'right_stationary'
if pressed_keys[K_w]:
if not airborn1:
airborn1 = True
verticalVelocity1 = -20
#player2
if pressed_keys[K_LEFT]:
moveLeft2 = True
moveRight2 = False
orientation2 = 'left'
state2 = 'left_stationary'
if pressed_keys[K_RIGHT]:
moveLeft2 = False
moveRight2 = True
orientation2 = 'right'
state2 = 'right_stationary'
if pressed_keys[K_UP]:
if not airborn2:
airborn2 = True
verticalVelocity2 = -20
#player1
if not pressed_keys[K_a]:
moveLeft1 = False
if not pressed_keys[K_d]:
moveRight1 = False
#player2
if not pressed_keys[K_LEFT]:
moveLeft2 = False
if not pressed_keys[K_RIGHT]:
moveRight2 = False
if event.type == KEYDOWN:
# change the keyboard variables
#player1
if event.key == ord('t'):
kick1 = True
if not airborn1:
moveLeft1 = False
moveRight1 = False
if orientation1 == 'right':
state1 = 'right_kick'
if orientation1 == 'left':
state1 = 'left_kick'
if airborn1:
if orientation1 == 'right':
state1 = 'right_kick'
if orientation1 == 'left':
state1 = 'left_kick'
if event.key == ord('y'):
punch1 = True
if not airborn1:
                            moveLeft1 = False
                            moveRight1 = False
if orientation1 == 'right':
state1 = 'right_punch'
if orientation1 == 'left':
state1 = 'left_punch'
if airborn1:
if orientation1 == 'right':
state1 = 'right_punch'
if orientation1 == 'left':
state1 = 'left_punch'
#player2
if event.key == ord('.'):
kick2 = True
if not airborn2:
moveLeft2 = False
moveRight2 = False
if orientation2 == 'right':
state2 = 'right_kick'
if orientation2 == 'left':
state2 = 'left_kick'
if airborn2:
if orientation2 == 'right':
state2 = 'right_kick'
if orientation2 == 'left':
state2 = 'left_kick'
if event.key == ord('/'):
punch2 = True
if not airborn2:
moveLeft2 = False
moveRight2 = False
if orientation2 == 'right':
state2 = 'right_punch'
if orientation2 == 'left':
state2 = 'left_punch'
if airborn2:
if orientation2 == 'right':
state2 = 'right_punch'
if orientation2 == 'left':
state2 = 'left_punch'
if event.type == KEYUP:
if event.key == K_ESCAPE:
pygame.quit()
sys.exit()
#player1
if event.key == ord('t') and orientation1 == 'right':
state1 = 'right_stationary'
if event.key == ord('t') and orientation1 == 'left':
state1 = 'left_stationary'
if event.key == ord('y') and orientation1 == 'right':
state1 = 'right_stationary'
if event.key == ord('y') and orientation1 == 'left':
state1 = 'left_stationary'
#player2
if event.key == ord('.') and orientation2 == 'right':
state2 = 'right_stationary'
if event.key == ord('.') and orientation2 == 'left':
state2 = 'left_stationary'
if event.key == ord('/') and orientation2 == 'right':
state2 = 'right_stationary'
if event.key == ord('/') and orientation2 == 'left':
state2 = 'left_stationary'
#sprite.mask = pygame.mask.from_surface(sprite.image)
#moveplayer
rectplayer1.left, rectplayer1.right = movement(moveLeft1, moveRight1, rectplayer1.left, rectplayer1.right, MOVESPEED, WINDOWWIDTH)
rectplayer2.left, rectplayer2.right = movement(moveLeft2, moveRight2, rectplayer2.left, rectplayer2.right, MOVESPEED, WINDOWWIDTH)
#jump player
            rectplayer1.top, airborn1, verticalVelocity1 = jumping(rectplayer1.top, airborn1, verticalVelocity1)
            rectplayer2.top, airborn2, verticalVelocity2 = jumping(rectplayer2.top, airborn2, verticalVelocity2)
if draw_background:
windowSurface.blit(background,(0,-450))
else:
windowSurface.fill((50,50,50))
#assign the image state to the sprite
splayer1.image = player1[state1]
splayer2.image = player2[state2]
#do the mask, do the monster mask, it was a 2 player smash
splayer1.mask = pygame.mask.from_surface(splayer1.image)
splayer2.mask = pygame.mask.from_surface(splayer2.image)
#assign the player rectangle to the sprite
splayer1.rect.topleft = [rectplayer1.left, rectplayer1.top]
splayer2.rect.topleft = [rectplayer2.left, rectplayer2.top]
hitcoordinates = collision(splayer1, splayer2)
#hitcoordinates = pygame.sprite.collide_mask(splayer1, splayer2)
            if hitcoordinates is not None:
hpplayer1, hpplayer2, hit = score(hpplayer1, hpplayer2, punch1, kick1, punch2, kick2, hit)
if hit:
windowSurface.blit(hit_background, [0,0])
pygame.draw.rect(windowSurface, (216,0,0), (620,30,-500,30), 0)
pygame.draw.rect(windowSurface, (216,0,0), (660,30, 500,30), 0)
if hpplayer1 > 0:
pygame.draw.rect(windowSurface, (19,193,0), (620,30,-hpplayer1,30), 0)
if hpplayer2 > 0:
pygame.draw.rect(windowSurface, (19,193,0), (660,30, hpplayer2,30), 0)
#draw players
windowSurface.blit(splayer1.image, splayer1.rect)
windowSurface.blit(splayer2.image, splayer2.rect)
#if hit:
# windowSurface.blit(hit_effect, [hitcoordinates[0] - 40 , hitcoordinates[1] + 324])
#draw the window onto the screen
pygame.display.update()
#pause for dramatic effect
if hit:
pygame.time.delay(350)
hit = False
mainClock.tick(60)
punch1 = False
punch2 = False
kick1 = False
kick2 = False
if hpplayer1 > 0:
print 'Player 1 wins!'
return 0
if hpplayer2 > 0:
print 'Player 2 wins!'
return 0
else:
print "rip both players"
return 0
main()
| mit | -6,023,337,996,907,088,000 | 31.741935 | 177 | 0.637531 | false |
FluidityProject/fluidity | python/elementtree/SimpleXMLWriter.py | 103 | 8616 | #
# SimpleXMLWriter
# $Id: SimpleXMLWriter.py 2312 2005-03-02 18:13:39Z fredrik $
#
# a simple XML writer
#
# history:
# 2001-12-28 fl created
# 2002-11-25 fl fixed attribute encoding
# 2002-12-02 fl minor fixes for 1.5.2
# 2004-06-17 fl added pythondoc markup
# 2004-07-23 fl added flush method (from Jay Graves)
# 2004-10-03 fl added declaration method
#
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The SimpleXMLWriter module is
#
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to write XML files, without having to deal with encoding
# issues, well-formedness, etc.
# <p>
# The current version does not provide built-in support for
# namespaces. To create files using namespaces, you have to provide
# "xmlns" attributes and explicitly add prefixes to tags and
# attributes.
#
# <h3>Patterns</h3>
#
# The following example generates a small XHTML document.
# <pre>
#
# from elementtree.SimpleXMLWriter import XMLWriter
# import sys
#
# w = XMLWriter(sys.stdout)
#
# html = w.start("html")
#
# w.start("head")
# w.element("title", "my document")
# w.element("meta", name="generator", value="my application 1.0")
# w.end()
#
# w.start("body")
# w.element("h1", "this is a heading")
# w.element("p", "this is a paragraph")
#
# w.start("p")
# w.data("this is ")
# w.element("b", "bold")
# w.data(" and ")
# w.element("i", "italic")
# w.data(".")
# w.end("p")
#
# w.close(html)
# </pre>
##
import re, sys, string
try:
unicode("")
except NameError:
def encode(s, encoding):
# 1.5.2: application must use the right encoding
return s
_escape = re.compile(r"[&<>\"\x80-\xff]+") # 1.5.2
else:
def encode(s, encoding):
return s.encode(encoding)
_escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"'))
def encode_entity(text, pattern=_escape):
# map reserved and non-ascii characters to numerical entities
def escape_entities(m):
out = []
for char in m.group():
out.append("&#%d;" % ord(char))
return string.join(out, "")
return encode(pattern.sub(escape_entities, text), "ascii")
del _escape
#
# the following functions assume an ascii-compatible encoding
# (or "utf-16")
def escape_cdata(s, encoding=None, replace=string.replace):
s = replace(s, "&", "&")
s = replace(s, "<", "<")
s = replace(s, ">", ">")
if encoding:
try:
return encode(s, encoding)
except UnicodeError:
return encode_entity(s)
return s
def escape_attrib(s, encoding=None, replace=string.replace):
s = replace(s, "&", "&")
s = replace(s, "'", "'")
s = replace(s, "\"", """)
s = replace(s, "<", "<")
s = replace(s, ">", ">")
if encoding:
try:
return encode(s, encoding)
except UnicodeError:
return encode_entity(s)
return s
##
# XML writer class.
#
# @param file A file or file-like object. This object must implement
# a <b>write</b> method that takes an 8-bit string.
# @param encoding Optional encoding.
class XMLWriter:
def __init__(self, file, encoding="us-ascii"):
if not hasattr(file, "write"):
file = open(file, "w")
self.__write = file.write
if hasattr(file, "flush"):
self.flush = file.flush
self.__open = 0 # true if start tag is open
self.__tags = []
self.__data = []
self.__encoding = encoding
def __flush(self):
# flush internal buffers
if self.__open:
self.__write(">")
self.__open = 0
if self.__data:
data = string.join(self.__data, "")
self.__write(escape_cdata(data, self.__encoding))
self.__data = []
##
# Writes an XML declaration.
def declaration(self):
encoding = self.__encoding
if encoding == "us-ascii" or encoding == "utf-8":
self.__write("<?xml version='1.0'?>\n")
else:
self.__write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
##
# Opens a new element. Attributes can be given as keyword
# arguments, or as a string/string dictionary. You can pass in
# 8-bit strings or Unicode strings; the former are assumed to use
# the encoding passed to the constructor. The method returns an
# opaque identifier that can be passed to the <b>close</b> method,
# to close all open elements up to and including this one.
#
# @param tag Element tag.
# @param attrib Attribute dictionary. Alternatively, attributes
# can be given as keyword arguments.
# @return An element identifier.
def start(self, tag, attrib={}, **extra):
self.__flush()
tag = escape_cdata(tag, self.__encoding)
self.__data = []
self.__tags.append(tag)
self.__write("<%s" % tag)
if attrib or extra:
attrib = attrib.copy()
attrib.update(extra)
attrib = attrib.items()
attrib.sort()
for k, v in attrib:
k = escape_cdata(k, self.__encoding)
v = escape_attrib(v, self.__encoding)
self.__write(" %s=\"%s\"" % (k, v))
self.__open = 1
return len(self.__tags)-1
##
# Adds a comment to the output stream.
#
# @param comment Comment text, as an 8-bit string or Unicode string.
def comment(self, comment):
self.__flush()
self.__write("<!-- %s -->\n" % escape_cdata(comment, self.__encoding))
##
# Adds character data to the output stream.
#
# @param text Character data, as an 8-bit string or Unicode string.
def data(self, text):
self.__data.append(text)
##
# Closes the current element (opened by the most recent call to
# <b>start</b>).
#
# @param tag Element tag. If given, the tag must match the start
# tag. If omitted, the current element is closed.
def end(self, tag=None):
if tag:
assert self.__tags, "unbalanced end(%s)" % tag
assert escape_cdata(tag, self.__encoding) == self.__tags[-1],\
"expected end(%s), got %s" % (self.__tags[-1], tag)
else:
assert self.__tags, "unbalanced end()"
tag = self.__tags.pop()
if self.__data:
self.__flush()
elif self.__open:
self.__open = 0
self.__write(" />")
return
self.__write("</%s>" % tag)
##
# Closes open elements, up to (and including) the element identified
# by the given identifier.
#
# @param id Element identifier, as returned by the <b>start</b> method.
def close(self, id):
while len(self.__tags) > id:
self.end()
##
# Adds an entire element. This is the same as calling <b>start</b>,
# <b>data</b>, and <b>end</b> in sequence. The <b>text</b> argument
# can be omitted.
def element(self, tag, text=None, attrib={}, **extra):
apply(self.start, (tag, attrib), extra)
if text:
self.data(text)
self.end()
##
# Flushes the output stream.
def flush(self):
pass # replaced by the constructor
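##
# Minimal runnable demo -- an addition, not part of the original module.
# It mirrors the pattern shown in the header comment, using only the
# writer defined above.
if __name__ == "__main__":
    w = XMLWriter(sys.stdout)
    doc = w.start("doc")
    w.element("title", "my document")
    w.start("p")
    w.data("this is ")
    w.element("b", "bold")
    w.data(" text")
    w.end("p")
    w.close(doc)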
| lgpl-2.1 | 7,714,591,521,416,884,000 | 29.88172 | 78 | 0.59552 | false |
alivecor/tensorflow | tensorflow/contrib/learn/python/learn/datasets/base_test.py | 136 | 3072 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.platform import test
mock = test.mock
_TIMEOUT = IOError(110, "timeout")
class BaseTest(test.TestCase):
"""Test load csv functions."""
def testUrlretrieveRetriesOnIOError(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
_TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, None
]
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert full backoff was tried
actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
expected_list = [1, 2, 4, 8, 16]
for actual, expected in zip(actual_list, expected_list):
self.assertLessEqual(abs(actual - expected), 0.25 * expected)
self.assertEquals(len(actual_list), len(expected_list))
def testUrlretrieveRaisesAfterRetriesAreExhausted(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
_TIMEOUT,
]
with self.assertRaises(IOError):
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert full backoff was tried
actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
expected_list = [1, 2, 4, 8, 16]
for actual, expected in zip(actual_list, expected_list):
self.assertLessEqual(abs(actual - expected), 0.25 * expected)
self.assertEquals(len(actual_list), len(expected_list))
def testUrlretrieveRaisesOnNonRetriableErrorWithoutRetry(self):
with mock.patch.object(base, "time") as mock_time:
with mock.patch.object(base, "urllib") as mock_urllib:
mock_urllib.request.urlretrieve.side_effect = [
IOError(2, "No such file or directory"),
]
with self.assertRaises(IOError):
base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")
# Assert no retries
self.assertFalse(mock_time.called)
if __name__ == "__main__":
test.main()
| apache-2.0 | 9,082,588,630,421,991,000 | 36.925926 | 80 | 0.660807 | false |
MER-GROUP/intellij-community | python/helpers/py3only/docutils/languages/de.py | 200 | 1722 | # $Id: de.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Gunnar Schwant <[email protected]>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
German language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
'author': 'Autor',
'authors': 'Autoren',
'organization': 'Organisation',
'address': 'Adresse',
'contact': 'Kontakt',
'version': 'Version',
'revision': 'Revision',
'status': 'Status',
'date': 'Datum',
'dedication': 'Widmung',
'copyright': 'Copyright',
'abstract': 'Zusammenfassung',
'attention': 'Achtung!',
'caution': 'Vorsicht!',
'danger': '!GEFAHR!',
'error': 'Fehler',
'hint': 'Hinweis',
'important': 'Wichtig',
'note': 'Bemerkung',
'tip': 'Tipp',
'warning': 'Warnung',
'contents': 'Inhalt'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
'autor': 'author',
'autoren': 'authors',
'organisation': 'organization',
'adresse': 'address',
'kontakt': 'contact',
'version': 'version',
'revision': 'revision',
'status': 'status',
'datum': 'date',
'copyright': 'copyright',
'widmung': 'dedication',
'zusammenfassung': 'abstract'}
"""German (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| apache-2.0 | 5,806,921,949,485,640,000 | 28.689655 | 76 | 0.634727 | false |
KanoComputing/kano-toolset | tests/fixtures/keyboard.py | 1 | 1887 | #
# keyboard.py
#
# Copyright (C) 2018 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2
#
# Fixtures for fake keyboards
#
import imp
import os
import pytest
KEYBOARD_LSUSB_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'keyboard'
)
KEYBOARD_LSUSB_OUTPUTS = [
('no_keyboard', None),
('other_keyboard', None),
('en_keyboard', 'en'),
('es_keyboard', 'es'),
]
@pytest.fixture(scope='function', params=KEYBOARD_LSUSB_OUTPUTS)
def keyboard(request, fs, monkeypatch):
'''
Simulates different keyboards, mainly by their outputs from terminal
commands.
Note: This fixture auto-reimports the `kano.utils.hardware` module, which we
expect to be the one which requires the patch, however depending on
the module, it may be required to re-import the module being tested
as this fixture patches `kano.utils.run_cmd` and if the tested module
depends on this directly and has already been loaded then the updated
version will not propagate. To re-import use:
import imp
import module.to.be.tested
imp.reload(module.to.be.tested)
'''
kb_file, version = request.param
lsusb_output_path = os.path.join(
KEYBOARD_LSUSB_DIR,
'{}.dump'.format(kb_file)
)
fs.add_real_file(lsusb_output_path)
with open(lsusb_output_path, 'r') as lsusb_output_f:
lsusb_output = lsusb_output_f.read()
def fake_lsusb_out(cmd):
if cmd.startswith('lsusb'):
return lsusb_output, None, None
else:
raise NotImplementedError(
'Command run is not lsusb: {}'.format(cmd)
)
    import kano.utils.shell
    import kano.utils.hardware
    monkeypatch.setattr(kano.utils.shell, 'run_cmd', fake_lsusb_out)
    imp.reload(kano.utils.hardware)
return version
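# Example consumer -- a sketch added here, not part of the original fixtures.
# A pytest test that uses the fixture above; it relies only on the values the
# fixture itself returns, so no further kano APIs are assumed.
def test_keyboard_fixture_smoke(keyboard):
    # the fixture yields the expected layout version for each fake lsusb dump
    assert keyboard in (None, 'en', 'es')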
| gpl-2.0 | -6,783,581,242,436,749,000 | 26.347826 | 80 | 0.63911 | false |
sudheesh001/oh-mainline | vendor/packages/PyYaml/lib/yaml/composer.py | 534 | 4921 |
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
from events import *
from nodes import *
class ComposerError(MarkedYAMLError):
pass
class Composer(object):
def __init__(self):
self.anchors = {}
def check_node(self):
# Drop the STREAM-START event.
if self.check_event(StreamStartEvent):
self.get_event()
# If there are more documents available?
return not self.check_event(StreamEndEvent)
def get_node(self):
# Get the root node of the next document.
if not self.check_event(StreamEndEvent):
return self.compose_document()
def get_single_node(self):
# Drop the STREAM-START event.
self.get_event()
# Compose a document if the stream is not empty.
document = None
if not self.check_event(StreamEndEvent):
document = self.compose_document()
# Ensure that the stream contains no more documents.
if not self.check_event(StreamEndEvent):
event = self.get_event()
raise ComposerError("expected a single document in the stream",
document.start_mark, "but found another document",
event.start_mark)
# Drop the STREAM-END event.
self.get_event()
return document
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
self.anchors = {}
return node
def compose_node(self, parent, index):
if self.check_event(AliasEvent):
event = self.get_event()
anchor = event.anchor
if anchor not in self.anchors:
raise ComposerError(None, None, "found undefined alias %r"
% anchor.encode('utf-8'), event.start_mark)
return self.anchors[anchor]
event = self.peek_event()
anchor = event.anchor
if anchor is not None:
if anchor in self.anchors:
raise ComposerError("found duplicate anchor %r; first occurence"
% anchor.encode('utf-8'), self.anchors[anchor].start_mark,
"second occurence", event.start_mark)
self.descend_resolver(parent, index)
if self.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
elif self.check_event(SequenceStartEvent):
node = self.compose_sequence_node(anchor)
elif self.check_event(MappingStartEvent):
node = self.compose_mapping_node(anchor)
self.ascend_resolver()
return node
def compose_scalar_node(self, anchor):
event = self.get_event()
tag = event.tag
if tag is None or tag == u'!':
tag = self.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(tag, event.value,
event.start_mark, event.end_mark, style=event.style)
if anchor is not None:
self.anchors[anchor] = node
return node
def compose_sequence_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
index = 0
while not self.check_event(SequenceEndEvent):
node.value.append(self.compose_node(node, index))
index += 1
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
def compose_mapping_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
#key_event = self.peek_event()
item_key = self.compose_node(node, None)
#if item_key in node.value:
# raise ComposerError("while composing a mapping", start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
#node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
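if __name__ == '__main__':
    # Demo sketch -- an addition, not part of the original module. Composer is
    # mixed into PyYAML's Loader classes, so yaml.compose() exercises the
    # compose_document() path above and returns the root node.
    import yaml
    node = yaml.compose("a: [1, 2]")
    print node.tag  # -> tag:yaml.org,2002:map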
| agpl-3.0 | -7,846,214,511,187,724,000 | 34.402878 | 89 | 0.581589 | false |
NeCTAR-RC/horizon | openstack_dashboard/templatetags/context_selection.py | 1 | 4352 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django.conf import settings
from django import template
from openstack_dashboard.api import keystone
register = template.Library()
def is_multi_region_configured(request):
return False
def is_multidomain_supported():
return (keystone.VERSIONS.active >= 3 and
getattr(settings,
'OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT',
False))
@register.simple_tag(takes_context=True)
def is_multi_region(context):
if 'request' not in context:
return False
return is_multi_region_configured(context['request'])
@register.simple_tag
def is_multidomain():
return is_multidomain_supported()
@register.inclusion_tag('context_selection/_overview.html',
takes_context=True)
def show_overview(context):
if 'request' not in context:
return {}
request = context['request']
project_name = get_project_name(request.user.project_id,
context['authorized_tenants'])
context = {'domain_supported': is_multidomain_supported(),
'domain_name': request.user.user_domain_name,
'project_name': project_name or request.user.project_name,
'multi_region': is_multi_region_configured(request),
'region_name': request.user.services_region,
'request': request}
return context
@register.inclusion_tag('context_selection/_domain_list.html',
takes_context=True)
def show_domain_list(context):
# TODO(Thai): once domain switching is support, need to revisit
if 'request' not in context:
return {}
request = context['request']
context = {'domain_name': request.user.user_domain_name,
'request': request}
return context
@register.inclusion_tag('context_selection/_project_list.html',
takes_context=True)
def show_project_list(context):
max_proj = getattr(settings, 'DROPDOWN_MAX_ITEMS', 30)
if 'request' not in context:
return {}
request = context['request']
projects = sorted(context['authorized_tenants'],
key=lambda project: project.name.lower())
panel = request.horizon.get('panel')
context = {'projects': projects[:max_proj],
'project_id': request.user.project_id,
'page_url': panel.get_absolute_url() if panel else None}
return context
@register.inclusion_tag('context_selection/_region_list.html',
takes_context=True)
def show_region_list(context):
if 'request' not in context:
return {}
request = context['request']
panel = request.horizon.get('panel')
context = {'region_name': request.user.services_region,
'regions': sorted(request.user.available_services_regions,
key=lambda x: (x or '').lower()),
'page_url': panel.get_absolute_url() if panel else None}
return context
@register.inclusion_tag('context_selection/_anti_clickjack.html',
takes_context=True)
def iframe_embed_settings(context):
disallow_iframe_embed = getattr(settings,
'DISALLOW_IFRAME_EMBED',
True)
context = {'disallow_iframe_embed': disallow_iframe_embed}
return context
def get_project_name(project_id, projects):
"""Retrieves project name for given project id
Args:
projects: List of projects
project_id: project id
Returns: Project name or None if there is no match
"""
for project in projects:
if project_id == project.id:
return project.name
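def _get_project_name_demo():
    # Standalone check -- an addition, not part of the original template tags.
    # get_project_name() only needs objects exposing ``id`` and ``name``, so a
    # namedtuple stands in for a keystone project here.
    from collections import namedtuple
    Project = namedtuple('Project', 'id name')
    assert get_project_name('1', [Project('1', 'demo')]) == 'demo'
    assert get_project_name('2', [Project('1', 'demo')]) is None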
| apache-2.0 | -3,776,427,272,437,218,000 | 32.221374 | 78 | 0.633502 | false |
bj7/pwndbg | ida_script.py | 2 | 1687 | import idaapi
import idautils
import idc
import functools
import datetime
import threading
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
# Save the database so nothing gets lost.
idc.SaveBase(idc.GetIdbPath() + '.' + datetime.datetime.now().isoformat())
xmlrpclib.Marshaller.dispatch[type(0L)] = lambda _, v, w: w("<value><i8>%d</i8></value>" % v)
xmlrpclib.Marshaller.dispatch[type(0)] = lambda _, v, w: w("<value><i8>%d</i8></value>" % v)
port = 8888
orig_LineA = idc.LineA
def LineA(*a,**kw):
v = orig_LineA(*a,**kw)
if v and v.startswith('\x01\x04; '):
v = v[4:]
return v
idc.LineA = LineA
mutex = threading.Condition()
def wrap(f):
def wrapper(*a, **kw):
try:
rv = []
def work(): rv.append(f(*a,**kw))
with mutex:
flags = idaapi.MFF_WRITE
if f == idc.SetColor:
flags |= idaapi.MFF_NOWAIT
rv.append(None)
idaapi.execute_sync(work, flags)
return rv[0]
except:
import traceback
traceback.print_exc()
raise
return wrapper
def register_module(module):
for name, function in module.__dict__.items():
if hasattr(function, '__call__'):
server.register_function(wrap(function), name)
server = SimpleXMLRPCServer(('127.0.0.1', port), logRequests=True, allow_none=True)
register_module(idc)
register_module(idautils)
register_module(idaapi)
server.register_introspection_functions()
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
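def example_client():
    # Usage sketch -- an addition, not part of the original script. Run this
    # from a *separate* Python 2 process while IDA hosts the server above;
    # the exported idc/idautils/idaapi functions are called by name, e.g.
    # ScreenEA() for the current cursor address.
    proxy = xmlrpclib.ServerProxy('http://127.0.0.1:8888', allow_none=True)
    return proxy.ScreenEA()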
| mit | -5,278,602,193,776,303,000 | 25.359375 | 93 | 0.619443 | false |
7digital/troposphere | examples/ApiGateway.py | 4 | 4609 | from troposphere import Ref, Template, Output
from troposphere.apigateway import RestApi, Method
from troposphere.apigateway import Resource, MethodResponse
from troposphere.apigateway import Integration, IntegrationResponse
from troposphere.apigateway import Deployment, Stage, ApiStage
from troposphere.apigateway import UsagePlan, QuotaSettings, ThrottleSettings
from troposphere.apigateway import ApiKey, StageKey, UsagePlanKey
from troposphere.iam import Role, Policy
from troposphere.awslambda import Function, Code
from troposphere import GetAtt, Join
t = Template()
# Create the Api Gateway
rest_api = t.add_resource(RestApi(
"ExampleApi",
Name="ExampleApi"
))
# Create a Lambda function that will be mapped
code = [
"var response = require('cfn-response');",
"exports.handler = function(event, context) {",
" context.succeed('foobar!');",
" return 'foobar!';",
"};",
]
# Create a role for the lambda function
t.add_resource(Role(
"LambdaExecutionRole",
Path="/",
Policies=[Policy(
PolicyName="root",
PolicyDocument={
"Version": "2012-10-17",
"Statement": [{
"Action": ["logs:*"],
"Resource": "arn:aws:logs:*:*:*",
"Effect": "Allow"
}, {
"Action": ["lambda:*"],
"Resource": "*",
"Effect": "Allow"
}]
})],
AssumeRolePolicyDocument={"Version": "2012-10-17", "Statement": [
{
"Action": ["sts:AssumeRole"],
"Effect": "Allow",
"Principal": {
"Service": [
"lambda.amazonaws.com",
"apigateway.amazonaws.com"
]
}
}
]},
))
# Create the Lambda function
foobar_function = t.add_resource(Function(
"FoobarFunction",
Code=Code(
ZipFile=Join("", code)
),
Handler="index.handler",
Role=GetAtt("LambdaExecutionRole", "Arn"),
Runtime="nodejs4.3",
))
# Create a resource to map the lambda function to
resource = t.add_resource(Resource(
"FoobarResource",
RestApiId=Ref(rest_api),
PathPart="foobar",
ParentId=GetAtt("ExampleApi", "RootResourceId"),
))
# Create a Lambda API method for the Lambda resource
method = t.add_resource(Method(
"LambdaMethod",
DependsOn='FoobarFunction',
RestApiId=Ref(rest_api),
AuthorizationType="NONE",
ResourceId=Ref(resource),
HttpMethod="GET",
Integration=Integration(
Credentials=GetAtt("LambdaExecutionRole", "Arn"),
Type="AWS",
IntegrationHttpMethod='POST',
IntegrationResponses=[
IntegrationResponse(
StatusCode='200'
)
],
Uri=Join("", [
"arn:aws:apigateway:eu-west-1:lambda:path/2015-03-31/functions/",
GetAtt("FoobarFunction", "Arn"),
"/invocations"
])
),
MethodResponses=[
MethodResponse(
"CatResponse",
StatusCode='200'
)
]
))
# Create a deployment
stage_name = 'v1'
deployment = t.add_resource(Deployment(
"%sDeployment" % stage_name,
DependsOn="LambdaMethod",
RestApiId=Ref(rest_api),
))
stage = t.add_resource(Stage(
'%sStage' % stage_name,
StageName=stage_name,
RestApiId=Ref(rest_api),
DeploymentId=Ref(deployment)
))
key = t.add_resource(ApiKey(
"ApiKey",
StageKeys=[StageKey(
RestApiId=Ref(rest_api),
StageName=Ref(stage)
)]
))
# Create an API usage plan
usagePlan = t.add_resource(UsagePlan(
"ExampleUsagePlan",
UsagePlanName="ExampleUsagePlan",
Description="Example usage plan",
Quota=QuotaSettings(
Limit=50000,
Period="MONTH"
),
Throttle=ThrottleSettings(
BurstLimit=500,
RateLimit=5000
),
ApiStages=[
ApiStage(
ApiId=Ref(rest_api),
Stage=Ref(stage)
)]
))
# tie the usage plan and key together
usagePlanKey = t.add_resource(UsagePlanKey(
"ExampleUsagePlanKey",
KeyId=Ref(key),
KeyType="API_KEY",
UsagePlanId=Ref(usagePlan)
))
# Add the deployment endpoint as an output
t.add_output([
Output(
"ApiEndpoint",
Value=Join("", [
"https://",
Ref(rest_api),
".execute-api.eu-west-1.amazonaws.com/",
stage_name
]),
Description="Endpoint for this stage of the api"
),
Output(
"ApiKey",
Value=Ref(key),
Description="API key"
),
])
print(t.to_json())
| bsd-2-clause | -7,410,159,825,665,922,000 | 24.048913 | 77 | 0.588631 | false |
andreparrish/python-for-android | python-build/python-libs/gdata/build/lib/gdata/blogger/__init__.py | 140 | 6426 | #!/usr/bin/python
#
# Copyright (C) 2007, 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Blogger."""
__author__ = 'api.jscudder (Jeffrey Scudder)'
import atom
import gdata
import re
LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
THR_NAMESPACE = 'http://purl.org/syndication/thread/1.0'
class BloggerEntry(gdata.GDataEntry):
"""Adds convenience methods inherited by all Blogger entries."""
blog_name_pattern = re.compile('(http://)(\w*)')
blog_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)')
blog_id2_pattern = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')
def GetBlogId(self):
"""Extracts the Blogger id of this blog.
    This method is useful when constructing URLs by hand. The blog id is
often used in blogger operation URLs. This should not be confused with
the id member of a BloggerBlog. The id element is the Atom id XML element.
The blog id which this method returns is a part of the Atom id.
Returns:
The blog's unique id as a string.
"""
if self.id.text:
match = self.blog_id_pattern.match(self.id.text)
if match:
return match.group(2)
else:
return self.blog_id2_pattern.match(self.id.text).group(2)
return None
def GetBlogName(self):
"""Finds the name of this blog as used in the 'alternate' URL.
An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
entry representing the above example, this method would return 'blogName'.
Returns:
The blog's URL name component as a string.
"""
for link in self.link:
if link.rel == 'alternate':
return self.blog_name_pattern.match(link.href).group(2)
return None
class BlogEntry(BloggerEntry):
"""Describes a blog entry in the feed listing a user's blogs."""
def BlogEntryFromString(xml_string):
return atom.CreateClassFromXMLString(BlogEntry, xml_string)
class BlogFeed(gdata.GDataFeed):
"""Describes a feed of a user's blogs."""
_children = gdata.GDataFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogEntry])
def BlogFeedFromString(xml_string):
return atom.CreateClassFromXMLString(BlogFeed, xml_string)
class BlogPostEntry(BloggerEntry):
"""Describes a blog post entry in the feed of a blog's posts."""
post_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')
def AddLabel(self, label):
"""Adds a label to the blog post.
The label is represented by an Atom category element, so this method
is shorthand for appending a new atom.Category object.
Args:
label: str
"""
self.category.append(atom.Category(scheme=LABEL_SCHEME, term=label))
def GetPostId(self):
"""Extracts the postID string from the entry's Atom id.
Returns: A string of digits which identify this post within the blog.
"""
if self.id.text:
return self.post_id_pattern.match(self.id.text).group(4)
return None
def BlogPostEntryFromString(xml_string):
return atom.CreateClassFromXMLString(BlogPostEntry, xml_string)
class BlogPostFeed(gdata.GDataFeed):
"""Describes a feed of a blog's posts."""
_children = gdata.GDataFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogPostEntry])
def BlogPostFeedFromString(xml_string):
return atom.CreateClassFromXMLString(BlogPostFeed, xml_string)
class InReplyTo(atom.AtomBase):
_tag = 'in-reply-to'
_namespace = THR_NAMESPACE
_attributes = atom.AtomBase._attributes.copy()
_attributes['href'] = 'href'
_attributes['ref'] = 'ref'
_attributes['source'] = 'source'
_attributes['type'] = 'type'
def __init__(self, href=None, ref=None, source=None, type=None,
extension_elements=None, extension_attributes=None, text=None):
self.href = href
self.ref = ref
self.source = source
self.type = type
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
self.text = text
def InReplyToFromString(xml_string):
return atom.CreateClassFromXMLString(InReplyTo, xml_string)
class CommentEntry(BloggerEntry):
"""Describes a blog post comment entry in the feed of a blog post's
comments."""
_children = BloggerEntry._children.copy()
_children['{%s}in-reply-to' % THR_NAMESPACE] = ('in_reply_to', InReplyTo)
comment_id_pattern = re.compile('.*-(\w*)$')
def __init__(self, author=None, category=None, content=None,
contributor=None, atom_id=None, link=None, published=None, rights=None,
source=None, summary=None, control=None, title=None, updated=None,
in_reply_to=None, extension_elements=None, extension_attributes=None,
text=None):
BloggerEntry.__init__(self, author=author, category=category,
content=content, contributor=contributor, atom_id=atom_id, link=link,
published=published, rights=rights, source=source, summary=summary,
control=control, title=title, updated=updated,
extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
self.in_reply_to = in_reply_to
def GetCommentId(self):
"""Extracts the commentID string from the entry's Atom id.
Returns: A string of digits which identify this post within the blog.
"""
if self.id.text:
return self.comment_id_pattern.match(self.id.text).group(1)
return None
def CommentEntryFromString(xml_string):
return atom.CreateClassFromXMLString(CommentEntry, xml_string)
class CommentFeed(gdata.GDataFeed):
"""Describes a feed of a blog post's comments."""
_children = gdata.GDataFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CommentEntry])
def CommentFeedFromString(xml_string):
return atom.CreateClassFromXMLString(CommentFeed, xml_string)
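if __name__ == '__main__':
    # Demo sketch -- an addition, not part of the original module. The id
    # patterns above recognise both Atom id forms Blogger has used.
    m = BloggerEntry.blog_id_pattern.match('tag:blogger.com,1999:blog-12345')
    print m.group(2)  # -> '12345'
    m = BloggerEntry.blog_id2_pattern.match(
        'tag:blogger.com,1999:user-42.blog-12345')
    print m.group(2)  # -> '12345'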
| apache-2.0 | 2,902,015,068,329,859,600 | 30.811881 | 80 | 0.700124 | false |
kobotoolbox/kobocat | onadata/libs/constants.py | 1 | 1349 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
# Userprofile Permissions
CAN_ADD_USERPROFILE = 'add_userprofile'
CAN_CHANGE_USERPROFILE = 'change_userprofile'
CAN_DELETE_USERPROFILE = 'delete_userprofile'
CAN_ADD_XFORM_TO_PROFILE = 'can_add_xform'
CAN_VIEW_PROFILE = 'view_profile'
# Organization Permissions
CAN_VIEW_ORGANIZATION_PROFILE = 'view_organizationprofile'
CAN_ADD_ORGANIZATION_PROFILE = 'add_organizationprofile'
CAN_ADD_ORGANIZATION_XFORM = 'can_add_xform'
CAN_CHANGE_ORGANIZATION_PROFILE = 'change_organizationprofile'
CAN_DELETE_ORGANIZATION_PROFILE = 'delete_organizationprofile'
IS_ORGANIZATION_OWNER = 'is_org_owner'
# Xform Permissions
CAN_CHANGE_XFORM = 'change_xform'
CAN_VALIDATE_XFORM = 'validate_xform'
CAN_DELETE_DATA_XFORM = 'delete_data_xform'
CAN_ADD_XFORM = 'add_xform'
CAN_DELETE_XFORM = 'delete_xform'
CAN_VIEW_XFORM = 'view_xform'
CAN_ADD_SUBMISSIONS = 'report_xform'
CAN_TRANSFER_OWNERSHIP = 'transfer_xform'
CAN_MOVE_TO_FOLDER = 'move_xform'
# Project Permissions
CAN_VIEW_PROJECT = 'view_project'
CAN_CHANGE_PROJECT = 'change_project'
CAN_TRANSFER_PROJECT_OWNERSHIP = 'transfer_project'
CAN_DELETE_PROJECT = 'delete_project'
CAN_ADD_DATADICTIONARY = 'add_datadictionary'
CAN_CHANGE_DATADICTIONARY = 'change_datadictionary'
CAN_DELETE_DATADICTIONARY = 'delete_datadictionary'
| bsd-2-clause | 5,707,182,612,514,873,000 | 34.5 | 62 | 0.774648 | false |
alexteodor/odoo | addons/pos_restaurant/__init__.py | 332 | 1074 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import restaurant
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -7,123,556,356,118,463,000 | 41.96 | 79 | 0.60987 | false |
GeoNode/geonode | geonode/geoserver/tests/test_helpers.py | 2 | 7983 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2019 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.contrib.auth import get_user_model
from geonode.tests.base import GeoNodeBaseTestSupport
import os
import re
import gisdata
from urllib.parse import urljoin
from django.conf import settings
from geonode import geoserver
from geonode.decorators import on_ogc_backend
from geonode.layers.models import Layer
from geonode.layers.utils import file_upload
from geonode.layers.populate_layers_data import create_layer_data
from geonode.geoserver.views import _response_callback
import logging
logger = logging.getLogger(__name__)
class HelperTest(GeoNodeBaseTestSupport):
type = 'layer'
def setUp(self):
super(HelperTest, self).setUp()
self.user = 'admin'
self.passwd = 'admin'
create_layer_data()
@on_ogc_backend(geoserver.BACKEND_PACKAGE)
def test_replace_layer(self):
"""
Ensures the layer_style_manage route returns a 200.
"""
admin = get_user_model().objects.get(username="admin")
layer = Layer.objects.all()[0]
logger.debug(Layer.objects.all())
self.assertIsNotNone(layer)
logger.debug("Attempting to replace a vector layer with a raster.")
        filename = os.path.join(
gisdata.GOOD_DATA,
'vector/san_andres_y_providencia_administrative.shp')
vector_layer = file_upload(filename, user=admin)
self.assertTrue(vector_layer.is_vector())
filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
with self.assertRaisesRegex(Exception, "You are attempting to replace a vector layer with a raster."):
file_upload(filename, layer=vector_layer, overwrite=True)
logger.debug("Attempting to replace a raster layer with a vector.")
raster_layer = file_upload(filename, user=admin)
self.assertFalse(raster_layer.is_vector())
        filename = os.path.join(
gisdata.GOOD_DATA,
'vector/san_andres_y_providencia_administrative.shp')
with self.assertRaisesRegex(Exception, "You are attempting to replace a raster layer with a vector."):
file_upload(filename, layer=raster_layer, overwrite=True)
logger.debug("Attempting to replace a vector layer.")
replaced = file_upload(filename, layer=vector_layer, overwrite=True, gtype='LineString')
self.assertIsNotNone(replaced)
self.assertTrue(replaced.is_vector())
@on_ogc_backend(geoserver.BACKEND_PACKAGE)
def test_replace_callback(self):
content = f"""<Layer>
<Title>GeoNode Local GeoServer</Title>
<Abstract>This is a description of your Web Map Server.</Abstract>
<!--Limited list of EPSG projections:-->
<CRS>EPSG:4326</CRS>
<CRS>EPSG:3785</CRS>
<CRS>EPSG:3857</CRS>
<CRS>EPSG:900913</CRS>
<CRS>EPSG:32647</CRS>
<CRS>EPSG:32736</CRS>
<CRS>CRS:84</CRS>
<EX_GeographicBoundingBox>
<westBoundLongitude>-124.731422</westBoundLongitude>
<eastBoundLongitude>12.512771464573753</eastBoundLongitude>
<southBoundLatitude>12.4801497</southBoundLatitude>
<northBoundLatitude>49.371735</northBoundLatitude>
</EX_GeographicBoundingBox>
<BoundingBox CRS="CRS:84" ..../>
<BoundingBox CRS="EPSG:4326" ..../>
<BoundingBox CRS="EPSG:3785" ..../>
<BoundingBox CRS="EPSG:3857" ..../>
<BoundingBox CRS="EPSG:900913" ..../>
<BoundingBox CRS="EPSG:32647" ..../>
<BoundingBox CRS="EPSG:32736" ..../>
<Layer queryable="1" opaque="0">
<Name>geonode:DE_USNG_UTM18</Name>
<Title>DE_USNG_UTM18</Title>
<Abstract>No abstract provided</Abstract>
<KeywordList>
<Keyword>DE_USNG_UTM18</Keyword>
<Keyword>features</Keyword>
</KeywordList>
<CRS>EPSG:26918</CRS>
<CRS>CRS:84</CRS>
<EX_GeographicBoundingBox>
<westBoundLongitude>-75.93570725669369</westBoundLongitude>
<eastBoundLongitude>-75.00000000000001</eastBoundLongitude>
<southBoundLatitude>38.3856300861002</southBoundLatitude>
<northBoundLatitude>39.89406880610797</northBoundLatitude>
</EX_GeographicBoundingBox>
<BoundingBox CRS="CRS:84" .01" maxy="39.89406880610797"/>
<BoundingBox CRS="EPSG:26918" ..../>
<BoundingBox CRS="EPSG:4326" ..../>
<BoundingBox CRS="EPSG:3785" ..../>
<BoundingBox CRS="EPSG:3857" ..../>
<BoundingBox CRS="EPSG:900913" ..../>
<BoundingBox CRS="EPSG:32647" ..../>
<BoundingBox CRS="EPSG:32736" ..../>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="FGDC">
<Format>text/xml</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}catalogue/csw?outputschema=...."/>
</MetadataURL>
<MetadataURL type="other">
<Format>other</Format>
<OnlineResource xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}showmetadata/xsl/584"/>
</MetadataURL>
<Style>
<Name>geonode:DE_USNG_UTM18</Name>
<Title>Default Polygon</Title>
<Abstract>A sample style that draws a polygon</Abstract>
<LegendURL width="20" height="20">
<Format>image/png</Format>
<OnlineResource
xmlns:xlink="http://www.w3.org/1999/xlink" xlink:type="simple"
xlink:href="{settings.GEOSERVER_LOCATION}ows?service=WMS&request=GetLegendGraphic&...."/>
</LegendURL>
</Style>
</Layer>"""
kwargs = {
'content': content,
'status': 200,
'content_type': 'application/xml'
}
_content = _response_callback(**kwargs).content
self.assertTrue(re.findall(f'{urljoin(settings.SITEURL, "/gs/")}ows', str(_content)))
kwargs = {
'content': content,
'status': 200,
'content_type': 'text/xml; charset=UTF-8'
}
_content = _response_callback(**kwargs).content
self.assertTrue(re.findall(f'{urljoin(settings.SITEURL, "/gs/")}ows', str(_content)))
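# A minimal sketch (not part of the original test) of the contract checked
# above: _response_callback is expected to rewrite absolute
# GEOSERVER_LOCATION URLs in the body to the proxied /gs/ path under
# SITEURL. The exact rewrite rule is an assumption inferred from these
# assertions, not a documented API.
#
#   body = f'<a href="{settings.GEOSERVER_LOCATION}ows"/>'
#   rewritten = _response_callback(content=body, status=200,
#                                  content_type='application/xml').content
#   assert f'{urljoin(settings.SITEURL, "/gs/")}ows' in str(rewritten)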
| gpl-3.0 | 1,011,767,441,808,122,400 | 39.318182 | 110 | 0.634223 | false |
sajuptpm/neutron-ipam | neutron/plugins/ml2/drivers/mech_linuxbridge.py | 26 | 2340 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import constants
from neutron.extensions import portbindings
from neutron.openstack.common import log
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import mech_agent
LOG = log.getLogger(__name__)
class LinuxbridgeMechanismDriver(mech_agent.SimpleAgentMechanismDriverBase):
"""Attach to networks using linuxbridge L2 agent.
The LinuxbridgeMechanismDriver integrates the ml2 plugin with the
linuxbridge L2 agent. Port binding with this driver requires the
linuxbridge agent to be running on the port's host, and that agent
to have connectivity to at least one segment of the port's
network.
"""
def __init__(self):
super(LinuxbridgeMechanismDriver, self).__init__(
constants.AGENT_TYPE_LINUXBRIDGE,
portbindings.VIF_TYPE_BRIDGE,
{portbindings.CAP_PORT_FILTER: True})
def check_segment_for_agent(self, segment, agent):
mappings = agent['configurations'].get('interface_mappings', {})
tunnel_types = agent['configurations'].get('tunnel_types', [])
LOG.debug(_("Checking segment: %(segment)s "
"for mappings: %(mappings)s "
"with tunnel_types: %(tunnel_types)s"),
{'segment': segment, 'mappings': mappings,
'tunnel_types': tunnel_types})
network_type = segment[api.NETWORK_TYPE]
if network_type == 'local':
return True
elif network_type in tunnel_types:
return True
elif network_type in ['flat', 'vlan']:
return segment[api.PHYSICAL_NETWORK] in mappings
else:
return False
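# A minimal sketch (not part of the original module) of how
# check_segment_for_agent evaluates a segment against an agent's reported
# configuration; the dict shapes below are assumptions inferred from the
# lookups above, not a documented API.
#
#   driver = LinuxbridgeMechanismDriver()
#   agent = {'configurations': {'interface_mappings': {'physnet1': 'eth1'},
#                               'tunnel_types': ['vxlan']}}
#   flat = {api.NETWORK_TYPE: 'flat', api.PHYSICAL_NETWORK: 'physnet1'}
#   vxlan = {api.NETWORK_TYPE: 'vxlan'}
#   assert driver.check_segment_for_agent(flat, agent)    # mapped physnet
#   assert driver.check_segment_for_agent(vxlan, agent)   # tunnel type match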
| apache-2.0 | 8,434,575,220,404,979,000 | 40.052632 | 78 | 0.673077 | false |
nerdvegas/rez | src/rez/data/tests/builds/packages/foo/1.1.0/build.py | 1 | 1617 | from __future__ import print_function
from build_util import build_directory_recurse, check_visible
import os.path
def build(source_path, build_path, install_path, targets):
# build requirement 'floob' should be visible
check_visible("foo", "floob")
import floob
print(floob.hello())
# do the build
if "install" not in (targets or []):
install_path = None
build_directory_recurse(src_dir="foo",
dest_dir=os.path.join("python", "foo"),
source_path=source_path,
build_path=build_path,
install_path=install_path)
if __name__ == '__main__':
import os, sys
build(
source_path=os.environ['REZ_BUILD_SOURCE_PATH'],
build_path=os.environ['REZ_BUILD_PATH'],
install_path=os.environ['REZ_BUILD_INSTALL_PATH'],
targets=sys.argv[1:]
)
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
| lgpl-3.0 | -6,371,292,564,097,938,000 | 32.6875 | 79 | 0.656772 | false |
cfg2015/EPT-2015-2 | addons/marketing_campaign/report/__init__.py | 441 | 1071 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import campaign_analysis
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -8,746,986,861,130,917,000 | 43.625 | 78 | 0.61718 | false |
SheepDogInc/ssheepdog | ssheepdog/migrations/0007_auto__add_loginlog__chg_field_applicationkey_public_key__chg_field_use.py | 1 | 8799 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'LoginLog'
db.create_table('ssheepdog_loginlog', (
('stdout', self.gf('django.db.models.fields.TextField')(default='')),
('actor', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('stderr', self.gf('django.db.models.fields.TextField')(default='')),
('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('login', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ssheepdog.Login'], null=True)),
('message', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('ssheepdog', ['LoginLog'])
# Changing field 'ApplicationKey.public_key'
db.alter_column('ssheepdog_applicationkey', 'public_key', self.gf('ssheepdog.fields.PublicKeyField')())
# Changing field 'UserProfile.ssh_key'
db.alter_column('ssheepdog_userprofile', 'ssh_key', self.gf('ssheepdog.fields.PublicKeyField')())
def backwards(self, orm):
# Deleting model 'LoginLog'
db.delete_table('ssheepdog_loginlog')
# Changing field 'ApplicationKey.public_key'
db.alter_column('ssheepdog_applicationkey', 'public_key', self.gf('django.db.models.fields.TextField')())
# Changing field 'UserProfile.ssh_key'
db.alter_column('ssheepdog_userprofile', 'ssh_key', self.gf('django.db.models.fields.TextField')())
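# Typical South usage for this migration (assumes a standard manage.py
# project layout; the commands are illustrative, not part of the generated
# file):
#
#   python manage.py migrate ssheepdog 0007   # apply forwards()
#   python manage.py migrate ssheepdog 0006   # roll back via backwards()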
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ssheepdog.applicationkey': {
'Meta': {'object_name': 'ApplicationKey'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'private_key': ('django.db.models.fields.TextField', [], {}),
'public_key': ('ssheepdog.fields.PublicKeyField', [], {})
},
'ssheepdog.client': {
'Meta': {'object_name': 'Client'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'ssheepdog.login': {
'Meta': {'object_name': 'Login'},
'application_key': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ssheepdog.ApplicationKey']", 'null': 'True'}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ssheepdog.Client']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_dirty': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'machine': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ssheepdog.Machine']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
},
'ssheepdog.loginlog': {
'Meta': {'object_name': 'LoginLog'},
'actor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ssheepdog.Login']", 'null': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'stderr': ('django.db.models.fields.TextField', [], {'default': "''"}),
'stdout': ('django.db.models.fields.TextField', [], {'default': "''"})
},
'ssheepdog.machine': {
'Meta': {'object_name': 'Machine'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ssheepdog.Client']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_down': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'port': ('django.db.models.fields.IntegerField', [], {'default': '22'})
},
'ssheepdog.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'ssh_key': ('ssheepdog.fields.PublicKeyField', [], {}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'_profile_cache'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['auth.User']"})
}
}
complete_apps = ['ssheepdog']
| bsd-3-clause | 4,565,284,130,976,736,000 | 65.659091 | 178 | 0.5562 | false |
flyher/pymo | symbian/PythonForS60_1.9.6/module-repo/standard-modules/encodings/gb2312.py | 816 | 1027 | #
# gb2312.py: Python Unicode Codec for GB2312
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gb2312')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='gb2312',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
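# Minimal usage sketch (relies on the standard codec registry; the byte
# values shown are an assumption for illustration, Python 2 str semantics):
#
#   data = u'\u4f60\u597d'.encode('gb2312')   # u'\u4f60\u597d' == "ni hao"
#   assert data == '\xc4\xe3\xba\xc3'
#   assert data.decode('gb2312') == u'\u4f60\u597d'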
| mit | 7,419,779,822,632,666,000 | 25.333333 | 74 | 0.700097 | false |
Edraak/edx-platform | lms/djangoapps/shoppingcart/context_processor.py | 173 | 1679 | """
This is the shoppingcart context_processor module.
Currently the only context_processor detects whether request.user has a cart that should be displayed in the
navigation. We want to do this in the context_processor to
1) keep database accesses out of templates (this led to a transaction bug with user email changes)
2) because navigation.html is "called" by being included in other templates, there's no "views.py" to put this.
"""
from .models import Order, PaidCourseRegistration, CourseRegCodeItem
from .utils import is_shopping_cart_enabled
def user_has_cart_context_processor(request):
"""
Checks if request has an authenticated user. If so, checks if request.user has a cart that should
be displayed. Anonymous users don't.
Adds `display_shopping_cart` to the context
"""
def should_display_shopping_cart():
"""
Returns a boolean if the user has an items in a cart whereby the shopping cart should be
displayed to the logged in user
"""
return (
# user is logged in and
request.user.is_authenticated() and
# do we have the feature turned on
is_shopping_cart_enabled() and
# does the user actually have a cart (optimized query to prevent creation of a cart when not needed)
Order.does_user_have_cart(request.user) and
# user's cart has PaidCourseRegistrations or CourseRegCodeItem
Order.user_cart_has_items(
request.user,
[PaidCourseRegistration, CourseRegCodeItem]
)
)
return {'should_display_shopping_cart_func': should_display_shopping_cart}
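# A minimal sketch of enabling this processor (the settings variable name
# depends on the Django version in use; the dotted path is assumed from this
# module's location):
#
#   TEMPLATE_CONTEXT_PROCESSORS += (
#       'shoppingcart.context_processor.user_has_cart_context_processor',
#   )
#
# Django templates invoke the callable lazily, so the database is only hit
# when a template actually evaluates it:
#   {% if should_display_shopping_cart_func %} ... {% endif %}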
| agpl-3.0 | -5,993,320,272,630,864,000 | 43.184211 | 112 | 0.682549 | false |