# lxybox1/MissionPlanner | refs/heads/master | Lib/site-packages/numpy/lib/tests/test_format.py
r''' Test the .npy file format.
Set up:
>>> import sys
>>> if sys.version_info[0] >= 3:
... from io import BytesIO as StringIO
... else:
... from cStringIO import StringIO
>>> from numpy.lib import format
>>>
>>> scalars = [
... np.uint8,
... np.int8,
... np.uint16,
... np.int16,
... np.uint32,
... np.int32,
... np.uint64,
... np.int64,
... np.float32,
... np.float64,
... np.complex64,
... np.complex128,
... object,
... ]
>>>
>>> basic_arrays = []
>>>
>>> for scalar in scalars:
... for endian in '<>':
... dtype = np.dtype(scalar).newbyteorder(endian)
... basic = np.arange(15).astype(dtype)
... basic_arrays.extend([
... np.array([], dtype=dtype),
... np.array(10, dtype=dtype),
... basic,
... basic.reshape((3,5)),
... basic.reshape((3,5)).T,
... basic.reshape((3,5))[::-1,::2],
... ])
...
>>>
>>> Pdescr = [
... ('x', 'i4', (2,)),
... ('y', 'f8', (2, 2)),
... ('z', 'u1')]
>>>
>>>
>>> PbufferT = [
... ([3,2], [[6.,4.],[6.,4.]], 8),
... ([4,3], [[7.,5.],[7.,5.]], 9),
... ]
>>>
>>>
>>> Ndescr = [
... ('x', 'i4', (2,)),
... ('Info', [
... ('value', 'c16'),
... ('y2', 'f8'),
... ('Info2', [
... ('name', 'S2'),
... ('value', 'c16', (2,)),
... ('y3', 'f8', (2,)),
... ('z3', 'u4', (2,))]),
... ('name', 'S2'),
... ('z2', 'b1')]),
... ('color', 'S2'),
... ('info', [
... ('Name', 'U8'),
... ('Value', 'c16')]),
... ('y', 'f8', (2, 2)),
... ('z', 'u1')]
>>>
>>>
>>> NbufferT = [
... ([3,2], (6j, 6., ('nn', [6j,4j], [6.,4.], [1,2]), 'NN', True), 'cc', ('NN', 6j), [[6.,4.],[6.,4.]], 8),
... ([4,3], (7j, 7., ('oo', [7j,5j], [7.,5.], [2,1]), 'OO', False), 'dd', ('OO', 7j), [[7.,5.],[7.,5.]], 9),
... ]
>>>
>>>
>>> record_arrays = [
... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')),
... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')),
... np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')),
... np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')),
... ]
Test the magic string writing.
>>> format.magic(1, 0)
'\x93NUMPY\x01\x00'
>>> format.magic(0, 0)
'\x93NUMPY\x00\x00'
>>> format.magic(255, 255)
'\x93NUMPY\xff\xff'
>>> format.magic(2, 5)
'\x93NUMPY\x02\x05'
Test the magic string reading.
>>> format.read_magic(StringIO(format.magic(1, 0)))
(1, 0)
>>> format.read_magic(StringIO(format.magic(0, 0)))
(0, 0)
>>> format.read_magic(StringIO(format.magic(255, 255)))
(255, 255)
>>> format.read_magic(StringIO(format.magic(2, 5)))
(2, 5)
Test the header writing.
>>> for arr in basic_arrays + record_arrays:
... f = StringIO()
... format.write_array_header_1_0(f, arr) # XXX: arr is not a dict, items gets called on it
... print(repr(f.getvalue()))
...
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|u1', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|u1', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|i1', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|i1', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<u2', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<u2', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<u2', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>u2', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>u2', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>u2', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<i2', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<i2', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<i2', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>i2', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>i2', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>i2', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<u4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<u4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<u4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>u4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>u4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>u4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<i4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<i4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<i4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>i4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>i4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>i4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<u8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<u8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<u8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>u8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>u8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>u8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<i8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<i8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<i8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>i8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>i8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>i8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<f4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<f4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<f4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>f4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>f4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>f4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<f8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<f8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<f8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>f8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>f8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>f8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<c8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<c8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<c8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>c8', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>c8', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>c8', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '<c16', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '<c16', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '<c16', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '>c16', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '>c16', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '>c16', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|O4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (3, 3)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (0,)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': ()} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (15,)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (3, 5)} \n"
"F\x00{'descr': '|O4', 'fortran_order': True, 'shape': (5, 3)} \n"
"F\x00{'descr': '|O4', 'fortran_order': False, 'shape': (3, 3)} \n"
"v\x00{'descr': [('x', '<i4', (2,)), ('y', '<f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n"
"\x16\x02{'descr': [('x', '<i4', (2,)),\n ('Info',\n [('value', '<c16'),\n ('y2', '<f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '<c16', (2,)),\n ('y3', '<f8', (2,)),\n ('z3', '<u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '<U8'), ('Value', '<c16')]),\n ('y', '<f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n"
"v\x00{'descr': [('x', '>i4', (2,)), ('y', '>f8', (2, 2)), ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n"
"\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n"
'''
import sys
import os
import shutil
import tempfile
if sys.version_info[0] >= 3:
from io import BytesIO as StringIO
else:
from cStringIO import StringIO
import numpy as np
from numpy.testing import *
from numpy.lib import format
from numpy.compat import asbytes, asbytes_nested
tempdir = None
# Module-level setup.
def setup_module():
global tempdir
tempdir = tempfile.mkdtemp()
def teardown_module():
global tempdir
if tempdir is not None and os.path.isdir(tempdir):
shutil.rmtree(tempdir)
tempdir = None
# Generate some basic arrays to test with.
scalars = [
np.uint8,
np.int8,
np.uint16,
np.int16,
np.uint32,
np.int32,
np.uint64,
np.int64,
np.float32,
np.float64,
np.complex64,
np.complex128,
object,
]
basic_arrays = []
for scalar in scalars:
for endian in '<>':
dtype = np.dtype(scalar).newbyteorder(endian)
basic = np.arange(15).astype(dtype)
basic_arrays.extend([
# Empty
np.array([], dtype=dtype),
# Rank-0
np.array(10, dtype=dtype),
# 1-D
basic,
# 2-D C-contiguous
basic.reshape((3,5)),
# 2-D F-contiguous
basic.reshape((3,5)).T,
# 2-D non-contiguous
basic.reshape((3,5))[::-1,::2],
])
# More complicated record arrays.
# This is the structure of the table used for plain objects:
#
# +-+-+-+
# |x|y|z|
# +-+-+-+
# Structure of a plain array description:
Pdescr = [
('x', 'i4', (2,)),
('y', 'f8', (2, 2)),
('z', 'u1')]
# A plain list of tuples with values for testing:
PbufferT = [
# Each tuple is (x, y, z).
([3,2], [[6.,4.],[6.,4.]], 8),
([4,3], [[7.,5.],[7.,5.]], 9),
]
# This is the structure of the table used for nested objects (DON'T PANIC!):
#
# +-+---------------------------------+-----+----------+-+-+
# |x|Info                             |color|info      |y|z|
# | +-----+--+----------------+----+--+     +----+-----+ | |
# | |value|y2|Info2           |name|z2|     |Name|Value| | |
# | |     |  +----+-----+--+--+    |  |     |    |     | | |
# | |     |  |name|value|y3|z3|    |  |     |    |     | | |
# +-+-----+--+----+-----+--+--+----+--+-----+----+-----+-+-+
#
# The corresponding nested array description:
Ndescr = [
('x', 'i4', (2,)),
('Info', [
('value', 'c16'),
('y2', 'f8'),
('Info2', [
('name', 'S2'),
('value', 'c16', (2,)),
('y3', 'f8', (2,)),
('z3', 'u4', (2,))]),
('name', 'S2'),
('z2', 'b1')]),
('color', 'S2'),
('info', [
('Name', 'U8'),
('Value', 'c16')]),
('y', 'f8', (2, 2)),
('z', 'u1')]
NbufferT = [
# Each tuple is (x, Info, color, info, y, z), where
# Info = (value, y2, Info2, name, z2), Info2 = (name, value, y3, z3),
# and info = (Name, Value).
([3,2], (6j, 6., ('nn', [6j,4j], [6.,4.], [1,2]), 'NN', True), 'cc', ('NN', 6j), [[6.,4.],[6.,4.]], 8),
([4,3], (7j, 7., ('oo', [7j,5j], [7.,5.], [2,1]), 'OO', False), 'dd', ('OO', 7j), [[7.,5.],[7.,5.]], 9),
]
record_arrays = [
np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('<')),
np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('<')),
np.array(PbufferT, dtype=np.dtype(Pdescr).newbyteorder('>')),
np.array(NbufferT, dtype=np.dtype(Ndescr).newbyteorder('>')),
]
def roundtrip(arr):
f = StringIO()
format.write_array(f, arr)
f2 = StringIO(f.getvalue())
arr2 = format.read_array(f2)
return arr2
def assert_equal(o1, o2):
assert o1 == o2
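# The tests below are nose-style generator tests: each ``yield`` hands the
# runner a (check_function, *args) tuple that is executed as its own case.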
def test_roundtrip():
for arr in basic_arrays + record_arrays:
arr2 = roundtrip(arr)
yield assert_array_equal, arr, arr2
def test_memmap_roundtrip():
# XXX: test crashes nose on windows. Fix this
if not (sys.platform == 'win32' or sys.platform == 'cygwin'):
for arr in basic_arrays + record_arrays:
if arr.dtype.hasobject:
# Skip these since they can't be mmap'ed.
continue
# Write it out normally and through mmap.
nfn = os.path.join(tempdir, 'normal.npy')
mfn = os.path.join(tempdir, 'memmap.npy')
fp = open(nfn, 'wb')
try:
format.write_array(fp, arr)
finally:
fp.close()
fortran_order = (arr.flags.f_contiguous and not arr.flags.c_contiguous)
ma = format.open_memmap(mfn, mode='w+', dtype=arr.dtype,
shape=arr.shape, fortran_order=fortran_order)
ma[...] = arr
del ma
# Check that both of these files' contents are the same.
fp = open(nfn, 'rb')
normal_bytes = fp.read()
fp.close()
fp = open(mfn, 'rb')
memmap_bytes = fp.read()
fp.close()
yield assert_equal, normal_bytes, memmap_bytes
# Check that reading the file using memmap works.
ma = format.open_memmap(nfn, mode='r')
#yield assert_array_equal, ma, arr
#del ma
def test_write_version_1_0():
f = StringIO()
arr = np.arange(1)
# These should pass.
format.write_array(f, arr, version=(1, 0))
format.write_array(f, arr)
# These should all fail.
bad_versions = [
(1, 1),
(0, 0),
(0, 1),
(2, 0),
(2, 2),
(255, 255),
]
for version in bad_versions:
try:
format.write_array(f, arr, version=version)
except ValueError:
pass
else:
raise AssertionError("we should have raised a ValueError for the bad version %r" % (version,))
bad_version_magic = asbytes_nested([
'\x93NUMPY\x01\x01',
'\x93NUMPY\x00\x00',
'\x93NUMPY\x00\x01',
'\x93NUMPY\x02\x00',
'\x93NUMPY\x02\x02',
'\x93NUMPY\xff\xff',
])
malformed_magic = asbytes_nested([
'\x92NUMPY\x01\x00',
'\x00NUMPY\x01\x00',
'\x93numpy\x01\x00',
'\x93MATLB\x01\x00',
'\x93NUMPY\x01',
'\x93NUMPY',
'',
])
def test_read_magic_bad_magic():
for magic in malformed_magic:
f = StringIO(magic)
yield raises(ValueError)(format.read_magic), f
def test_read_version_1_0_bad_magic():
for magic in bad_version_magic + malformed_magic:
f = StringIO(magic)
yield raises(ValueError)(format.read_array), f
def test_bad_magic_args():
assert_raises(ValueError, format.magic, -1, 1)
assert_raises(ValueError, format.magic, 256, 1)
assert_raises(ValueError, format.magic, 1, -1)
assert_raises(ValueError, format.magic, 1, 256)
def test_large_header():
s = StringIO()
d = {'a':1,'b':2}
format.write_array_header_1_0(s,d)
s = StringIO()
d = {'a':1,'b':2,'c':'x'*256*256}
assert_raises(ValueError, format.write_array_header_1_0, s, d)
def test_bad_header():
# header of length less than 2 should fail
s = StringIO()
assert_raises(ValueError, format.read_array_header_1_0, s)
s = StringIO(asbytes('1'))
assert_raises(ValueError, format.read_array_header_1_0, s)
# header shorter than indicated size should fail
s = StringIO(asbytes('\x01\x00'))
assert_raises(ValueError, format.read_array_header_1_0, s)
# headers without the exact keys required should fail
d = {"shape":(1,2),
"descr":"x"}
s = StringIO()
format.write_array_header_1_0(s,d)
assert_raises(ValueError, format.read_array_header_1_0, s)
d = {"shape":(1,2),
"fortran_order":False,
"descr":"x",
"extrakey":-1}
s = StringIO()
format.write_array_header_1_0(s,d)
assert_raises(ValueError, format.read_array_header_1_0, s)
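# A minimal sketch (not part of the original suite) of the same roundtrip
# through numpy's public API: np.save and np.load write and read the .npy
# format implemented by format.write_array/format.read_array. The helper
# name is hypothetical; the leading underscore keeps it out of collection.
def _roundtrip_via_save_load_sketch():
    fn = os.path.join(tempdir, 'sketch.npy')  # requires setup_module() to have run
    arr = np.arange(15).reshape(3, 5)
    np.save(fn, arr)       # writes magic string, header, then raw data
    arr2 = np.load(fn)     # parses the header and reconstructs the array
    assert_array_equal(arr, arr2)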
if __name__ == "__main__":
run_module_suite()
# kivatu/kivy-bak | refs/heads/master | examples/shader/shadertree.py
'''
Tree shader
===========
This example is an experiment showing how to apply a shader to a tree
subset. Here, we make a ShaderWidget, different from the one in the
plasma.py example.
The ShaderWidget creates a Framebuffer, renders its children into it, and
then renders the Framebuffer with a specific shader.
This way, you can apply cool effects to your widgets :)
'''
from kivy.clock import Clock
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.scatter import Scatter
from kivy.uix.floatlayout import FloatLayout
from kivy.core.window import Window
from kivy.properties import StringProperty, ObjectProperty
from kivy.graphics import RenderContext, Fbo, Color, Rectangle
header = '''
#ifdef GL_ES
precision highp float;
#endif
/* Outputs from the vertex shader */
varying vec4 frag_color;
varying vec2 tex_coord0;
/* uniform texture samplers */
uniform sampler2D texture0;
uniform vec2 resolution;
uniform float time;
'''
# pulse (Danguafer/Silexars, 2010)
shader_pulse = header + '''
void main(void)
{
vec2 halfres = resolution.xy/2.0;
vec2 cPos = gl_FragCoord.xy;
cPos.x -= 0.5*halfres.x*sin(time/2.0)+0.3*halfres.x*cos(time)+halfres.x;
cPos.y -= 0.4*halfres.y*sin(time/5.0)+0.3*halfres.y*cos(time)+halfres.y;
float cLength = length(cPos);
vec2 uv = tex_coord0+(cPos/cLength)*sin(cLength/30.0-time*10.0)/25.0;
vec3 col = texture2D(texture0,uv).xyz*50.0/cLength;
gl_FragColor = vec4(col,1.0);
}
'''
# post processing (by iq, 2009)
shader_postprocessing = header + '''
uniform vec2 uvsize;
uniform vec2 uvpos;
void main(void)
{
vec2 q = tex_coord0 * vec2(1, -1);
vec2 uv = 0.5 + (q-0.5);//*(0.9);// + 0.1*sin(0.2*time));
vec3 oricol = texture2D(texture0,vec2(q.x,1.0-q.y)).xyz;
vec3 col;
col.r = texture2D(texture0,vec2(uv.x+0.003,-uv.y)).x;
col.g = texture2D(texture0,vec2(uv.x+0.000,-uv.y)).y;
col.b = texture2D(texture0,vec2(uv.x-0.003,-uv.y)).z;
col = clamp(col*0.5+0.5*col*col*1.2,0.0,1.0);
//col *= 0.5 + 0.5*16.0*uv.x*uv.y*(1.0-uv.x)*(1.0-uv.y);
col *= vec3(0.8,1.0,0.7);
col *= 0.9+0.1*sin(10.0*time+uv.y*1000.0);
col *= 0.97+0.03*sin(110.0*time);
float comp = smoothstep( 0.2, 0.7, sin(time) );
//col = mix( col, oricol, clamp(-2.0+2.0*q.x+3.0*comp,0.0,1.0) );
gl_FragColor = vec4(col,1.0);
}
'''
shader_monochrome = header + '''
void main() {
vec4 rgb = texture2D(texture0, tex_coord0);
float c = (rgb.x + rgb.y + rgb.z) * 0.3333;
gl_FragColor = vec4(c, c, c, 1.0);
}
'''
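# An extra fragment shader, included as an illustrative sketch (not part of
# the original example): it inverts the sampled color. Appending it to
# available_shaders in build() below would let the button cycle to it.
shader_invert = header + '''
void main(void)
{
    vec4 rgb = texture2D(texture0, tex_coord0);
    gl_FragColor = vec4(1.0 - rgb.rgb, 1.0);
}
'''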
class ShaderWidget(FloatLayout):
# property to set the source code for fragment shader
fs = StringProperty(None)
# texture of the framebuffer
texture = ObjectProperty(None)
def __init__(self, **kwargs):
# Instead of using canvas, we will use a RenderContext,
# and change the default shader used.
self.canvas = RenderContext(use_parent_projection=True)
# We create a framebuffer at the size of the window
# FIXME: this should be created at the size of the widget
with self.canvas:
self.fbo = Fbo(size=Window.size, use_parent_projection=True)
# Set the fbo background to black.
with self.fbo:
Color(0, 0, 0)
Rectangle(size=Window.size)
# call the constructor of parent
# if they are any graphics object, they will be added on our new canvas
super(ShaderWidget, self).__init__(**kwargs)
# We'll update our glsl variables in a clock
Clock.schedule_interval(self.update_glsl, 0)
# Don't forget to set the texture property to the texture of framebuffer
self.texture = self.fbo.texture
def update_glsl(self, *largs):
self.canvas['time'] = Clock.get_boottime()
self.canvas['resolution'] = list(map(float, self.size))
def on_fs(self, instance, value):
# set the fragment shader to our source code
shader = self.canvas.shader
old_value = shader.fs
shader.fs = value
if not shader.success:
shader.fs = old_value
raise Exception('failed to compile fragment shader')
#
# now, if we have new widget to add,
# add their graphics canvas to our Framebuffer, not the usual canvas.
#
def add_widget(self, widget):
c = self.canvas
self.canvas = self.fbo
super(ShaderWidget, self).add_widget(widget)
self.canvas = c
def remove_widget(self, widget):
c = self.canvas
self.canvas = self.fbo
super(ShaderWidget, self).remove_widget(widget)
self.canvas = c
class ScatterImage(Scatter):
source = StringProperty(None)
class ShaderTreeApp(App):
def build(self):
# prepare shader list
available_shaders = (
shader_pulse, shader_postprocessing, shader_monochrome)
self.shader_index = 0
# create our widget tree
root = FloatLayout()
sw = ShaderWidget()
root.add_widget(sw)
# add a button and scatter image inside the shader widget
btn = Button(text='Hello world', size_hint=(None, None),
pos_hint={'center_x': .25, 'center_y': .5})
sw.add_widget(btn)
center = Window.width * 0.75 - 256, Window.height * 0.5 - 256
scatter = ScatterImage(source='tex3.jpg', size_hint=(None, None),
size=(512, 512), pos=center)
sw.add_widget(scatter)
# create a button outside the shader widget, to change the current used
# shader
btn = Button(text='Change fragment shader', size_hint=(1, None),
height=50)
def change(*largs):
sw.fs = available_shaders[self.shader_index]
self.shader_index = (self.shader_index + 1) % len(available_shaders)
btn.bind(on_release=change)
root.add_widget(btn)
return root
if __name__ == '__main__':
ShaderTreeApp().run()
# vipulroxx/sympy | refs/heads/master | sympy/core/power.py
from __future__ import print_function, division
from math import log as _log
from .sympify import _sympify
from .cache import cacheit
from .singleton import S
from .expr import Expr
from .evalf import PrecisionExhausted
from .function import (_coeff_isneg, expand_complex, expand_multinomial,
expand_mul)
from .logic import fuzzy_bool
from .compatibility import as_int, range
from .evaluate import global_evaluate
from mpmath.libmp import sqrtrem as mpmath_sqrtrem
from sympy.utilities.iterables import sift
def integer_nthroot(y, n):
"""
Return a tuple containing x = floor(y**(1/n))
and a boolean indicating whether the result is exact (that is,
whether x**n == y).
>>> from sympy import integer_nthroot
>>> integer_nthroot(16,2)
(4, True)
>>> integer_nthroot(26,2)
(5, False)
"""
y, n = int(y), int(n)
if y < 0:
raise ValueError("y must be nonnegative")
if n < 1:
raise ValueError("n must be positive")
if y in (0, 1):
return y, True
if n == 1:
return y, True
if n == 2:
x, rem = mpmath_sqrtrem(y)
return int(x), not rem
if n > y:
return 1, False
# Get initial estimate for Newton's method. Care must be taken to
# avoid overflow
try:
guess = int(y**(1./n) + 0.5)
except OverflowError:
exp = _log(y, 2)/n
if exp > 53:
shift = int(exp - 53)
guess = int(2.0**(exp - shift) + 1) << shift
else:
guess = int(2.0**exp)
if guess > 2**50:
# Newton iteration
xprev, x = -1, guess
while 1:
t = x**(n - 1)
xprev, x = x, ((n - 1)*x + y//t)//n
if abs(x - xprev) < 2:
break
else:
x = guess
# Compensate
t = x**n
while t < y:
x += 1
t = x**n
while t > y:
x -= 1
t = x**n
return x, t == y
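# Illustrative check (a sketch, not in the original source): the second
# element of the returned tuple reports whether the root is exact.
#   >>> integer_nthroot(125, 3)
#   (5, True)
#   >>> integer_nthroot(126, 3)
#   (5, False)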
class Pow(Expr):
"""
Defines the expression x**y as "x raised to a power y"
Singleton definitions involving (0, 1, -1, oo, -oo):
+--------------+---------+-----------------------------------------------+
| expr         | value   | reason                                        |
+==============+=========+===============================================+
| z**0         | 1       | Although arguments over 0**0 exist, see [2].  |
+--------------+---------+-----------------------------------------------+
| z**1         | z       |                                               |
+--------------+---------+-----------------------------------------------+
| (-oo)**(-1)  | 0       |                                               |
+--------------+---------+-----------------------------------------------+
| (-1)**-1     | -1      |                                               |
+--------------+---------+-----------------------------------------------+
| S.Zero**-1   | zoo     | This is not strictly true, as 0**-1 may be    |
|              |         | undefined, but is convenient in some contexts |
|              |         | where the base is assumed to be positive.     |
+--------------+---------+-----------------------------------------------+
| 1**-1        | 1       |                                               |
+--------------+---------+-----------------------------------------------+
| oo**-1       | 0       |                                               |
+--------------+---------+-----------------------------------------------+
| 0**oo        | 0       | Because for all complex numbers z near        |
|              |         | 0, z**oo -> 0.                                |
+--------------+---------+-----------------------------------------------+
| 0**-oo       | zoo     | This is not strictly true, as 0**-oo may be   |
|              |         | oscillating between positive and negative     |
|              |         | values or rotating in the complex plane.      |
|              |         | It is convenient, however, when the base      |
|              |         | is positive.                                  |
+--------------+---------+-----------------------------------------------+
| 1**oo        | nan     | Because there are various cases where         |
| 1**-oo       |         | lim(x(t),t)=1, lim(y(t),t)=oo (or -oo),       |
| 1**zoo       |         | but lim( x(t)**y(t), t) != 1. See [3].        |
+--------------+---------+-----------------------------------------------+
| (-1)**oo     | nan     | Because of oscillations in the limit.         |
| (-1)**(-oo)  |         |                                               |
+--------------+---------+-----------------------------------------------+
| oo**oo       | oo      |                                               |
+--------------+---------+-----------------------------------------------+
| oo**-oo      | 0       |                                               |
+--------------+---------+-----------------------------------------------+
| (-oo)**oo    | nan     |                                               |
| (-oo)**-oo   |         |                                               |
+--------------+---------+-----------------------------------------------+
Because symbolic computations are more flexible than floating point
calculations and we prefer to never return an incorrect answer,
we choose not to conform to all IEEE 754 conventions. This helps
us avoid extra test-case code in the calculation of limits.
See Also
========
sympy.core.numbers.Infinity
sympy.core.numbers.NegativeInfinity
sympy.core.numbers.NaN
References
==========
.. [1] http://en.wikipedia.org/wiki/Exponentiation
.. [2] http://en.wikipedia.org/wiki/Exponentiation#Zero_to_the_power_of_zero
.. [3] http://en.wikipedia.org/wiki/Indeterminate_forms
"""
is_Pow = True
__slots__ = ['is_commutative']
@cacheit
def __new__(cls, b, e, evaluate=None):
if evaluate is None:
evaluate = global_evaluate[0]
from sympy.functions.elementary.exponential import exp_polar
b = _sympify(b)
e = _sympify(e)
if evaluate:
if e is S.Zero:
return S.One
elif e is S.One:
return b
elif e.is_integer and _coeff_isneg(b):
if e.is_even:
b = -b
elif e.is_odd:
return -Pow(-b, e)
if S.NaN in (b, e): # XXX S.NaN**x -> S.NaN under assumption that x != 0
return S.NaN
elif b is S.One:
if abs(e).is_infinite:
return S.NaN
return S.One
else:
# recognize base as E
if not e.is_Atom and b is not S.Exp1 and b.func is not exp_polar:
from sympy import numer, denom, log, sign, im, factor_terms
c, ex = factor_terms(e, sign=False).as_coeff_Mul()
den = denom(ex)
if den.func is log and den.args[0] == b:
return S.Exp1**(c*numer(ex))
elif den.is_Add:
s = sign(im(b))
if s.is_Number and s and den == \
log(-factor_terms(b, sign=False)) + s*S.ImaginaryUnit*S.Pi:
return S.Exp1**(c*numer(ex))
obj = b._eval_power(e)
if obj is not None:
return obj
obj = Expr.__new__(cls, b, e)
obj.is_commutative = (b.is_commutative and e.is_commutative)
return obj
@property
def base(self):
return self._args[0]
@property
def exp(self):
return self._args[1]
@classmethod
def class_key(cls):
return 3, 2, cls.__name__
def _eval_power(self, other):
from sympy import Abs, arg, exp, floor, im, log, re, sign
b, e = self.as_base_exp()
if b is S.NaN:
return (b**e)**other # let __new__ handle it
s = None
if other.is_integer:
s = 1
elif b.is_polar: # e.g. exp_polar, besselj, var('p', polar=True)...
s = 1
elif e.is_real is not None:
# helper functions ===========================
def _half(e):
"""Return True if the exponent has a literal 2 as the
denominator, else None."""
if getattr(e, 'q', None) == 2:
return True
n, d = e.as_numer_denom()
if n.is_integer and d == 2:
return True
def _n2(e):
"""Return ``e`` evaluated to a Number with 2 significant
digits, else None."""
try:
rv = e.evalf(2, strict=True)
if rv.is_Number:
return rv
except PrecisionExhausted:
pass
# ===================================================
if e.is_real:
# we need _half(other) with constant floor or
# floor(S.Half - e*arg(b)/2/pi) == 0
# handle -1 as special case
if (e == -1) == True:
# floor arg. is 1/2 + arg(b)/2/pi
if _half(other):
if b.is_negative is True:
return S.NegativeOne**other*Pow(-b, e*other)
if b.is_real is False:
return Pow(b.conjugate()/Abs(b)**2, other)
elif e.is_even:
if b.is_real:
b = abs(b)
if b.is_imaginary:
b = abs(im(b))*S.ImaginaryUnit
if (abs(e) < 1) == True or (e == 1) == True:
s = 1 # floor = 0
elif b.is_nonnegative:
s = 1 # floor = 0
elif re(b).is_nonnegative and (abs(e) < 2) == True:
s = 1 # floor = 0
elif im(b).is_nonzero and (abs(e) == 2) == True:
s = 1 # floor = 0
elif _half(other):
s = exp(2*S.Pi*S.ImaginaryUnit*other*floor(
S.Half - e*arg(b)/(2*S.Pi)))
if s.is_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
else:
# e.is_real is False requires:
# _half(other) with constant floor or
# floor(S.Half - im(e*log(b))/2/pi) == 0
try:
s = exp(2*S.ImaginaryUnit*S.Pi*other*
floor(S.Half - im(e*log(b))/2/S.Pi))
# be careful to test that s is -1 or 1 b/c sign(I) == I:
# so check that s is real
if s.is_real and _n2(sign(s) - s) == 0:
s = sign(s)
else:
s = None
except PrecisionExhausted:
s = None
if s is not None:
return s*Pow(b, e*other)
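# Illustrative sketch of the guard logic above (not in the original source):
# (b**e)**other only collapses to b**(e*other) when it is provably safe,
# e.g. for a positive base:
#   >>> from sympy import Symbol, S
#   >>> p = Symbol('p', positive=True)
#   >>> (p**2)**S.Half
#   p
#   >>> x = Symbol('x')
#   >>> (x**2)**S.Half  # stays unevaluated since x may be negative
#   sqrt(x**2)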
def _eval_is_even(self):
if self.exp.is_integer and self.exp.is_positive:
return self.base.is_even
def _eval_is_positive(self):
from sympy import log
if self.base == self.exp:
if self.base.is_nonnegative:
return True
elif self.base.is_positive:
if self.exp.is_real:
return True
elif self.base.is_negative:
if self.exp.is_even:
return True
if self.exp.is_odd:
return False
elif self.base.is_nonpositive:
if self.exp.is_odd:
return False
elif self.base.is_imaginary:
if self.exp.is_integer:
m = self.exp % 4
if m.is_zero:
return True
if m.is_integer and m.is_zero is False:
return False
if self.exp.is_imaginary:
return log(self.base).is_imaginary
def _eval_is_negative(self):
if self.base.is_negative:
if self.exp.is_odd:
return True
if self.exp.is_even:
return False
elif self.base.is_positive:
if self.exp.is_real:
return False
elif self.base.is_nonnegative:
if self.exp.is_nonnegative:
return False
elif self.base.is_nonpositive:
if self.exp.is_even:
return False
elif self.base.is_real:
if self.exp.is_even:
return False
def _eval_is_zero(self):
if self.base.is_zero:
if self.exp.is_positive:
return True
elif self.exp.is_nonpositive:
return False
elif self.base.is_nonzero:
if self.exp.is_finite:
return False
elif self.exp.is_infinite:
if (1 - abs(self.base)).is_positive:
return self.exp.is_positive
elif (1 - abs(self.base)).is_negative:
return self.exp.is_negative
def _eval_is_integer(self):
b, e = self.args
if b.is_rational:
if b.is_integer is False and e.is_positive:
return False # rat**nonneg
if b.is_integer and e.is_integer:
if b is S.NegativeOne:
return True
if e.is_nonnegative or e.is_positive:
return True
if b.is_integer and e.is_negative and (e.is_finite or e.is_integer):
if (b - 1).is_nonzero and (b + 1).is_nonzero:
return False
if b.is_Number and e.is_Number:
check = self.func(*self.args)
return check.is_Integer
def _eval_is_real(self):
from sympy import arg, exp, log, Mul
real_b = self.base.is_real
if real_b is None:
if self.base.func == exp and self.base.args[0].is_imaginary:
return self.exp.is_imaginary
return
real_e = self.exp.is_real
if real_e is None:
return
if real_b and real_e:
if self.base.is_positive:
return True
elif self.base.is_nonnegative:
if self.exp.is_nonnegative:
return True
else:
if self.exp.is_integer:
return True
elif self.base.is_negative:
if self.exp.is_Rational:
return False
if real_e and self.exp.is_negative:
return Pow(self.base, -self.exp).is_real
im_b = self.base.is_imaginary
im_e = self.exp.is_imaginary
if im_b:
if self.exp.is_integer:
if self.exp.is_even:
return True
elif self.exp.is_odd:
return False
elif im_e and log(self.base).is_imaginary:
return True
elif self.exp.is_Add:
c, a = self.exp.as_coeff_Add()
if c and c.is_Integer:
return Mul(
self.base**c, self.base**a, evaluate=False).is_real
elif self.base in (-S.ImaginaryUnit, S.ImaginaryUnit):
if (self.exp/2).is_integer is False:
return False
if real_b and im_e:
if self.base is S.NegativeOne:
return True
c = self.exp.coeff(S.ImaginaryUnit)
if c:
ok = (c*log(self.base)/S.Pi).is_Integer
if ok is not None:
return ok
if real_b is False: # we already know it's not imag
i = arg(self.base)*self.exp/S.Pi
return i.is_integer
def _eval_is_complex(self):
if all(a.is_complex for a in self.args):
return True
def _eval_is_imaginary(self):
from sympy import arg, log
if self.base.is_imaginary:
if self.exp.is_integer:
odd = self.exp.is_odd
if odd is not None:
return odd
return
if self.exp.is_imaginary:
imlog = log(self.base).is_imaginary
if imlog is not None:
return False # I**i -> real; (2*I)**i -> complex ==> not imaginary
if self.base.is_real and self.exp.is_real:
if self.base.is_positive:
return False
else:
rat = self.exp.is_rational
if not rat:
return rat
if self.exp.is_integer:
return False
else:
half = (2*self.exp).is_integer
if half:
return self.base.is_negative
return half
if self.base.is_real is False: # we already know it's not imag
i = arg(self.base)*self.exp/S.Pi
return (2*i).is_odd
def _eval_is_odd(self):
if self.exp.is_integer:
if self.exp.is_positive:
return self.base.is_odd
elif self.exp.is_nonnegative and self.base.is_odd:
return True
elif self.base is S.NegativeOne:
return True
def _eval_is_finite(self):
if self.exp.is_negative:
if self.base.is_zero:
return False
if self.base.is_infinite:
return True
c1 = self.base.is_finite
if c1 is None:
return
c2 = self.exp.is_finite
if c2 is None:
return
if c1 and c2:
if self.exp.is_nonnegative or self.base.is_nonzero:
return True
def _eval_is_polar(self):
return self.base.is_polar
def _eval_subs(self, old, new):
from sympy import exp, log, Symbol
def _check(ct1, ct2, old):
"""Return bool, pow where, if bool is True, then the exponent of
Pow `old` will combine with `pow` so the substitution is valid,
otherwise bool will be False.
cti are the coefficient and terms of an exponent of self or old.
In this _eval_subs routine a change like (b**(2*x)).subs(b**x, y)
will give y**2 since (b**x)**2 == b**(2*x); if that equality does
not hold then the substitution should not occur so `bool` will be
False.
"""
coeff1, terms1 = ct1
coeff2, terms2 = ct2
if terms1 == terms2:
pow = coeff1/coeff2
try:
pow = as_int(pow)
combines = True
except ValueError:
combines = Pow._eval_power(
Pow(*old.as_base_exp(), evaluate=False),
pow) is not None
return combines, pow
return False, None
if old == self.base:
return new**self.exp._subs(old, new)
if old.func is self.func and self.base is old.base:
if self.exp.is_Add is False:
ct1 = self.exp.as_independent(Symbol, as_Add=False)
ct2 = old.exp.as_independent(Symbol, as_Add=False)
ok, pow = _check(ct1, ct2, old)
if ok:
# issue 5180: (x**(6*y)).subs(x**(3*y),z)->z**2
return self.func(new, pow)
else: # b**(6*x+a).subs(b**(3*x), y) -> y**2 * b**a
# exp(exp(x) + exp(x**2)).subs(exp(exp(x)), w) -> w * exp(exp(x**2))
oarg = old.exp
new_l = []
o_al = []
ct2 = oarg.as_coeff_mul()
for a in self.exp.args:
newa = a._subs(old, new)
ct1 = newa.as_coeff_mul()
ok, pow = _check(ct1, ct2, old)
if ok:
new_l.append(new**pow)
continue
o_al.append(newa)
if new_l:
new_l.append(Pow(self.base, Add(*o_al), evaluate=False))
return Mul(*new_l)
if old.func is exp and self.exp.is_real and self.base.is_positive:
ct1 = old.args[0].as_independent(Symbol, as_Add=False)
ct2 = (self.exp*log(self.base)).as_independent(
Symbol, as_Add=False)
ok, pow = _check(ct1, ct2, old)
if ok:
return self.func(new, pow) # (2**x).subs(exp(x*log(2)), z) -> z
def as_base_exp(self):
"""Return base and exp of self.
If base is 1/Integer, then return Integer, -exp. If this extra
processing is not needed, the base and exp properties will
give the raw arguments
Examples
========
>>> from sympy import Pow, S
>>> p = Pow(S.Half, 2, evaluate=False)
>>> p.as_base_exp()
(2, -2)
>>> p.args
(1/2, 2)
"""
b, e = self.args
if b.is_Rational and b.p == 1 and b.q != 1:
return Integer(b.q), -e
return b, e
def _eval_adjoint(self):
from sympy.functions.elementary.complexes import adjoint
i, p = self.exp.is_integer, self.base.is_positive
if i:
return adjoint(self.base)**self.exp
if p:
return self.base**adjoint(self.exp)
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return adjoint(expanded)
def _eval_conjugate(self):
from sympy.functions.elementary.complexes import conjugate as c
i, p = self.exp.is_integer, self.base.is_positive
if i:
return c(self.base)**self.exp
if p:
return self.base**c(self.exp)
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return c(expanded)
def _eval_transpose(self):
from sympy.functions.elementary.complexes import transpose
i, p = self.exp.is_integer, self.base.is_complex
if p:
return self.base**self.exp
if i:
return transpose(self.base)**self.exp
if i is False and p is False:
expanded = expand_complex(self)
if expanded != self:
return transpose(expanded)
def _eval_expand_power_exp(self, **hints):
"""a**(n+m) -> a**n*a**m"""
b = self.base
e = self.exp
if e.is_Add and e.is_commutative:
expr = []
for x in e.args:
expr.append(self.func(self.base, x))
return Mul(*expr)
return self.func(b, e)
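# e.g., as a sketch: (x**(y + 2)).expand(power_exp=True) gives x**2*x**y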
def _eval_expand_power_base(self, **hints):
"""(a*b)**n -> a**n * b**n"""
force = hints.get('force', False)
b = self.base
e = self.exp
if not b.is_Mul:
return self
cargs, nc = b.args_cnc(split_1=False)
# expand each term - this is top-level-only
# expansion but we have to watch out for things
# that don't have an _eval_expand method
if nc:
nc = [i._eval_expand_power_base(**hints)
if hasattr(i, '_eval_expand_power_base') else i
for i in nc]
if e.is_Integer:
if e.is_positive:
rv = Mul(*nc*e)
else:
rv = 1/Mul(*nc*-e)
if cargs:
rv *= Mul(*cargs)**e
return rv
if not cargs:
return self.func(Mul(*nc), e, evaluate=False)
nc = [Mul(*nc)]
# sift the commutative bases
def pred(x):
if x is S.ImaginaryUnit:
return S.ImaginaryUnit
polar = x.is_polar
if polar:
return True
if polar is None:
return fuzzy_bool(x.is_nonnegative)
sifted = sift(cargs, pred)
nonneg = sifted[True]
other = sifted[None]
neg = sifted[False]
imag = sifted[S.ImaginaryUnit]
if imag:
I = S.ImaginaryUnit
i = len(imag) % 4
if i == 0:
pass
elif i == 1:
other.append(I)
elif i == 2:
if neg:
nonn = -neg.pop()
if nonn is not S.One:
nonneg.append(nonn)
else:
neg.append(S.NegativeOne)
else:
if neg:
nonn = -neg.pop()
if nonn is not S.One:
nonneg.append(nonn)
else:
neg.append(S.NegativeOne)
other.append(I)
del imag
# bring out the bases that can be separated from the base
if force or e.is_integer:
# treat all commutatives the same and put nc in other
cargs = nonneg + neg + other
other = nc
else:
# this is just like what is happening automatically, except
# that now we are doing it for an arbitrary exponent for which
# no automatic expansion is done
assert not e.is_Integer
# handle negatives by making them all positive and putting
# the residual -1 in other
if len(neg) > 1:
o = S.One
if not other and neg[0].is_Number:
o *= neg.pop(0)
if len(neg) % 2:
o = -o
for n in neg:
nonneg.append(-n)
if o is not S.One:
other.append(o)
elif neg and other:
if neg[0].is_Number and neg[0] is not S.NegativeOne:
other.append(S.NegativeOne)
nonneg.append(-neg[0])
else:
other.extend(neg)
else:
other.extend(neg)
del neg
cargs = nonneg
other += nc
rv = S.One
if cargs:
rv *= Mul(*[self.func(b, e, evaluate=False) for b in cargs])
if other:
rv *= self.func(Mul(*other), e, evaluate=False)
return rv
def _eval_expand_multinomial(self, **hints):
"""(a+b+..) ** n -> a**n + n*a**(n-1)*b + .., n is nonzero integer"""
base, exp = self.args
result = self
if exp.is_Rational and exp.p > 0 and base.is_Add:
if not exp.is_Integer:
n = Integer(exp.p // exp.q)
if not n:
return result
else:
radical, result = self.func(base, exp - n), []
expanded_base_n = self.func(base, n)
if expanded_base_n.is_Pow:
expanded_base_n = \
expanded_base_n._eval_expand_multinomial()
for term in Add.make_args(expanded_base_n):
result.append(term*radical)
return Add(*result)
n = int(exp)
if base.is_commutative:
order_terms, other_terms = [], []
for b in base.args:
if b.is_Order:
order_terms.append(b)
else:
other_terms.append(b)
if order_terms:
# (f(x) + O(x^n))^m -> f(x)^m + m*f(x)^{m-1} *O(x^n)
f = Add(*other_terms)
o = Add(*order_terms)
if n == 2:
return expand_multinomial(f**n, deep=False) + n*f*o
else:
g = expand_multinomial(f**(n - 1), deep=False)
return expand_mul(f*g, deep=False) + n*g*o
if base.is_number:
# Efficiently expand expressions of the form (a + b*I)**n
# where 'a' and 'b' are real numbers and 'n' is integer.
a, b = base.as_real_imag()
if a.is_Rational and b.is_Rational:
if not a.is_Integer:
if not b.is_Integer:
k = self.func(a.q * b.q, n)
a, b = a.p*b.q, a.q*b.p
else:
k = self.func(a.q, n)
a, b = a.p, a.q*b
elif not b.is_Integer:
k = self.func(b.q, n)
a, b = a*b.q, b.p
else:
k = 1
a, b, c, d = int(a), int(b), 1, 0
while n:
if n & 1:
c, d = a*c - b*d, b*c + a*d
n -= 1
a, b = a*a - b*b, 2*a*b
n //= 2
I = S.ImaginaryUnit
if k == 1:
return c + I*d
else:
return Integer(c)/k + I*d/k
p = other_terms
# (x+y)**3 -> x**3 + 3*x**2*y + 3*x*y**2 + y**3
# in this particular example:
# p = [x,y]; n = 3
# so now it's easy to get the correct result -- we get the
# coefficients first:
from sympy import multinomial_coefficients
from sympy.polys.polyutils import basic_from_dict
expansion_dict = multinomial_coefficients(len(p), n)
# in our example: {(3, 0): 1, (1, 2): 3, (0, 3): 1, (2, 1): 3}
# and now construct the expression.
return basic_from_dict(expansion_dict, *p)
else:
if n == 2:
return Add(*[f*g for f in base.args for g in base.args])
else:
multi = (base**(n - 1))._eval_expand_multinomial()
if multi.is_Add:
return Add(*[f*g for f in base.args
for g in multi.args])
else:
# XXX can this ever happen if base was an Add?
return Add(*[f*multi for f in base.args])
elif (exp.is_Rational and exp.p < 0 and base.is_Add and
abs(exp.p) > exp.q):
return 1 / self.func(base, -exp)._eval_expand_multinomial()
elif exp.is_Add and base.is_Number:
# n**(a + b) --> n**a * n**b, where n, a, b are Numbers
coeff, tail = S.One, S.Zero
for term in exp.args:
if term.is_Number:
coeff *= self.func(base, term)
else:
tail += term
return coeff * self.func(base, tail)
else:
return result
def as_real_imag(self, deep=True, **hints):
from sympy import atan2, cos, im, re, sin
from sympy.polys.polytools import poly
if self.exp.is_Integer:
exp = self.exp
re, im = self.base.as_real_imag(deep=deep)
if not im:
return self, S.Zero
a, b = symbols('a b', cls=Dummy)
if exp >= 0:
if re.is_Number and im.is_Number:
# We can be more efficient in this case
expr = expand_multinomial(self.base**exp)
return expr.as_real_imag()
expr = poly(
(a + b)**exp) # a = re, b = im; expr = (a + b*I)**exp
else:
mag = re**2 + im**2
re, im = re/mag, -im/mag
if re.is_Number and im.is_Number:
# We can be more efficient in this case
expr = expand_multinomial((re + im*S.ImaginaryUnit)**-exp)
return expr.as_real_imag()
expr = poly((a + b)**-exp)
# Terms with even b powers will be real
r = [i for i in expr.terms() if not i[0][1] % 2]
re_part = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
# Terms with odd b powers will be imaginary
r = [i for i in expr.terms() if i[0][1] % 4 == 1]
im_part1 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
r = [i for i in expr.terms() if i[0][1] % 4 == 3]
im_part3 = Add(*[cc*a**aa*b**bb for (aa, bb), cc in r])
return (re_part.subs({a: re, b: S.ImaginaryUnit*im}),
im_part1.subs({a: re, b: im}) + im_part3.subs({a: re, b: -im}))
elif self.exp.is_Rational:
re, im = self.base.as_real_imag(deep=deep)
if im.is_zero and self.exp is S.Half:
if re.is_nonnegative:
return self, S.Zero
if re.is_nonpositive:
return S.Zero, (-self.base)**self.exp
# XXX: This is not totally correct since for x**(p/q) with
# x being imaginary there are actually q roots, but
# only a single one is returned from here.
r = self.func(self.func(re, 2) + self.func(im, 2), S.Half)
t = atan2(im, re)
rp, tp = self.func(r, self.exp), t*self.exp
return (rp*cos(tp), rp*sin(tp))
else:
if deep:
hints['complex'] = False
expanded = self.expand(deep, **hints)
if hints.get('ignore') == expanded:
return None
else:
return (re(expanded), im(expanded))
else:
return (re(self), im(self))
def _eval_derivative(self, s):
from sympy import log
dbase = self.base.diff(s)
dexp = self.exp.diff(s)
return self * (dexp * log(self.base) + dbase * self.exp/self.base)
def _eval_evalf(self, prec):
base, exp = self.as_base_exp()
base = base._evalf(prec)
if not exp.is_Integer:
exp = exp._evalf(prec)
if exp.is_negative and base.is_number and base.is_real is False:
base = base.conjugate() / (base * base.conjugate())._evalf(prec)
exp = -exp
return self.func(base, exp).expand()
return self.func(base, exp)
def _eval_is_polynomial(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return bool(self.base._eval_is_polynomial(syms) and
self.exp.is_Integer and (self.exp >= 0))
else:
return True
def _eval_is_rational(self):
p = self.func(*self.as_base_exp()) # in case it's unevaluated
if not p.is_Pow:
return p.is_rational
b, e = p.as_base_exp()
if e.is_Rational and b.is_Rational:
# we didn't check that e is not an Integer
# because Rational**Integer autosimplifies
return False
if e.is_integer:
if b.is_rational:
if b.is_nonzero or e.is_nonnegative:
return True
if b == e: # always rational, even for 0**0
return True
elif b.is_irrational:
return e.is_zero
def _eval_is_algebraic(self):
if self.base.is_zero or (self.base - 1).is_zero:
return True
elif self.exp.is_rational:
return self.base.is_algebraic
elif self.base.is_algebraic and self.exp.is_algebraic:
if ((self.base.is_nonzero and (self.base - 1).is_nonzero)
or self.base.is_integer is False
or self.base.is_irrational):
return self.exp.is_rational
def _eval_is_rational_function(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_rational_function(syms) and \
self.exp.is_Integer
else:
return True
def _eval_is_algebraic_expr(self, syms):
if self.exp.has(*syms):
return False
if self.base.has(*syms):
return self.base._eval_is_algebraic_expr(syms) and \
self.exp.is_Rational
else:
return True
def as_numer_denom(self):
if not self.is_commutative:
return self, S.One
base, exp = self.as_base_exp()
n, d = base.as_numer_denom()
# this should be the same as ExpBase.as_numer_denom wrt
# exponent handling
neg_exp = exp.is_negative
if not neg_exp and not (-exp).is_negative:
neg_exp = _coeff_isneg(exp)
int_exp = exp.is_integer
# the denominator cannot be separated from the numerator if
# its sign is unknown unless the exponent is an integer, e.g.
# sqrt(a/b) != sqrt(a)/sqrt(b) when a=1 and b=-1. But if the
# denominator is negative the numerator and denominator can
# be negated and the denominator (now positive) separated.
if not (d.is_real or int_exp):
n = base
d = S.One
dnonpos = d.is_nonpositive
if dnonpos:
n, d = -n, -d
elif dnonpos is None and not int_exp:
n = base
d = S.One
if neg_exp:
n, d = d, n
exp = -exp
return self.func(n, exp), self.func(d, exp)
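# Illustrative sketch of the sign caveat above (not in the original source):
#   >>> from sympy import sqrt, Symbol
#   >>> sqrt(1/Symbol('b')).as_numer_denom()  # sign of b unknown
#   (sqrt(1/b), 1)
#   >>> sqrt(1/Symbol('b', positive=True)).as_numer_denom()
#   (1, sqrt(b))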
def matches(self, expr, repl_dict={}, old=False):
expr = _sympify(expr)
# special case, pattern = 1 and expr.exp can match to 0
if expr is S.One:
d = repl_dict.copy()
d = self.exp.matches(S.Zero, d)
if d is not None:
return d
# make sure the expression to be matched is an Expr
if not isinstance(expr, Expr):
return None
b, e = expr.as_base_exp()
# special case number
sb, se = self.as_base_exp()
if sb.is_Symbol and se.is_Integer and expr:
if e.is_rational:
return sb.matches(b**(e/se), repl_dict)
return sb.matches(expr**(1/se), repl_dict)
d = repl_dict.copy()
d = self.base.matches(b, d)
if d is None:
return None
d = self.exp.xreplace(d).matches(e, d)
if d is None:
return Expr.matches(self, expr, repl_dict)
return d
def _eval_nseries(self, x, n, logx):
# NOTE! This function is an important part of the gruntz algorithm
# for computing limits. It has to return a generalized power
# series with coefficients in C(log, log(x)). In more detail:
# It has to return an expression
# c_0*x**e_0 + c_1*x**e_1 + ... (finitely many terms)
# where e_i are numbers (not necessarily integers) and c_i are
# expressions involving only numbers, the log function, and log(x).
from sympy import ceiling, collect, exp, log, O, Order, powsimp
b, e = self.args
if e.is_Integer:
if e > 0:
# positive integer powers are easy to expand, e.g.:
# sin(x)**4 = (x-x**3/3+...)**4 = ...
return expand_multinomial(self.func(b._eval_nseries(x, n=n,
logx=logx), e), deep=False)
elif e is S.NegativeOne:
# this is also easy to expand using the formula:
# 1/(1 + x) = 1 - x + x**2 - x**3 ...
# so we need to rewrite base to the form "1+x"
nuse = n
cf = 1
try:
ord = b.as_leading_term(x)
cf = Order(ord, x).getn()
if cf and cf.is_Number:
nuse = n + 2*ceiling(cf)
else:
cf = 1
except NotImplementedError:
pass
b_orig, prefactor = b, O(1, x)
while prefactor.is_Order:
nuse += 1
b = b_orig._eval_nseries(x, n=nuse, logx=logx)
prefactor = b.as_leading_term(x)
# express "rest" as: rest = 1 + k*x**l + ... + O(x**n)
rest = expand_mul((b - prefactor)/prefactor)
if rest.is_Order:
return 1/prefactor + rest/prefactor + O(x**n, x)
k, l = rest.leadterm(x)
if l.is_Rational and l > 0:
pass
elif l.is_number and l > 0:
l = l.evalf()
elif l == 0:
k = k.simplify()
if k == 0:
# if prefactor == w**4 + x**2*w**4 + 2*x*w**4, we need to
# factor the w**4 out using collect:
return 1/collect(prefactor, x)
else:
raise NotImplementedError()
else:
raise NotImplementedError()
if cf < 0:
cf = S.One/abs(cf)
try:
dn = Order(1/prefactor, x).getn()
if dn and dn < 0:
pass
else:
dn = 0
except NotImplementedError:
dn = 0
terms = [1/prefactor]
for m in range(1, ceiling((n - dn)/l*cf)):
new_term = terms[-1]*(-rest)
if new_term.is_Pow:
new_term = new_term._eval_expand_multinomial(
deep=False)
else:
new_term = expand_mul(new_term, deep=False)
terms.append(new_term)
terms.append(O(x**n, x))
return powsimp(Add(*terms), deep=True, combine='exp')
else:
# negative powers are rewritten to the cases above, for
# example:
# sin(x)**(-4) = 1/( sin(x)**4) = ...
# and expand the denominator:
nuse, denominator = n, O(1, x)
while denominator.is_Order:
denominator = (b**(-e))._eval_nseries(x, n=nuse, logx=logx)
nuse += 1
if 1/denominator == self:
return self
# now we have a type 1/f(x), that we know how to expand
return (1/denominator)._eval_nseries(x, n=n, logx=logx)
if e.has(Symbol):
return exp(e*log(b))._eval_nseries(x, n=n, logx=logx)
# see if the base is as simple as possible
bx = b
while bx.is_Pow and bx.exp.is_Rational:
bx = bx.base
if bx == x:
return self
# work for b(x)**e where e is not an Integer and does not contain x
# and hopefully has no other symbols
def e2int(e):
"""return the integer value (if possible) of e and a
flag indicating whether it is bounded or not."""
n = e.limit(x, 0)
infinite = n.is_infinite
if not infinite:
# XXX was int or floor intended? int used to behave like floor
# so int(-Rational(1, 2)) returned -1 rather than int's 0
try:
n = int(n)
except TypeError:
#well, the n is something more complicated (like 1+log(2))
try:
n = int(n.evalf()) + 1 # XXX why is 1 being added?
except TypeError:
pass # hope that base allows this to be resolved
n = _sympify(n)
return n, infinite
order = O(x**n, x)
ei, infinite = e2int(e)
b0 = b.limit(x, 0)
if infinite and (b0 is S.One or b0.has(Symbol)):
# XXX what order
if b0 is S.One:
resid = (b - 1)
if resid.is_positive:
return S.Infinity
elif resid.is_negative:
return S.Zero
raise ValueError('cannot determine sign of %s' % resid)
return b0**ei
if (b0 is S.Zero or b0.is_infinite):
if infinite is not False:
return b0**e # XXX what order
if not ei.is_number: # if not, how will we proceed?
raise ValueError(
'expecting numerical exponent but got %s' % ei)
nuse = n - ei
if e.is_real and e.is_positive:
lt = b.as_leading_term(x)
# Try to correct nuse (= m) guess from:
# (lt + rest + O(x**m))**e =
# lt**e*(1 + rest/lt + O(x**m)/lt)**e =
# lt**e + ... + O(x**m)*lt**(e - 1) = ... + O(x**n)
try:
cf = Order(lt, x).getn()
nuse = ceiling(n - cf*(e - 1))
except NotImplementedError:
pass
bs = b._eval_nseries(x, n=nuse, logx=logx)
terms = bs.removeO()
if terms.is_Add:
bs = terms
lt = terms.as_leading_term(x)
# bs -> lt + rest -> lt*(1 + (bs/lt - 1))
return ((self.func(lt, e) * self.func((bs/lt).expand(), e).nseries(
x, n=nuse, logx=logx)).expand() + order)
if bs.is_Add:
from sympy import O
# bs still carries an O() term here (terms == bs.removeO() above)
c = Dummy('c')
res = []
for arg in bs.args:
if arg.is_Order:
arg = c*arg.expr
res.append(arg)
bs = Add(*res)
rv = (bs**e).series(x).subs(c, O(1, x))
rv += order
return rv
rv = bs**e
if terms != bs:
rv += order
return rv
# either b0 is bounded but neither 1 nor 0, or e is infinite
# b -> b0 + (b-b0) -> b0 * (1 + (b/b0-1))
o2 = order*(b0**-e)
z = (b/b0 - 1)
o = O(z, x)
if o is S.Zero or o2 is S.Zero:
infinite = True
else:
if o.expr.is_number:
e2 = log(o2.expr*x)/log(x)
else:
e2 = log(o2.expr)/log(o.expr)
n, infinite = e2int(e2)
if infinite:
# requested accuracy gives infinite series,
# order is probably non-polynomial e.g. O(exp(-1/x), x).
r = 1 + z
else:
l = []
g = None
for i in range(n + 2):
g = self._taylor_term(i, z, g)
g = g.nseries(x, n=n, logx=logx)
l.append(g)
r = Add(*l)
return expand_mul(r*b0**e) + order
def _eval_as_leading_term(self, x):
from sympy import exp, log
if not self.exp.has(x):
return self.func(self.base.as_leading_term(x), self.exp)
return exp(self.exp * log(self.base)).as_leading_term(x)
@cacheit
def _taylor_term(self, n, x, *previous_terms): # of (1+x)**e
from sympy import binomial
return binomial(self.exp, n) * self.func(x, n)
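# e.g. for self.exp = 1/2 this yields the binomial series of (1 + x)**(1/2):
# 1, x/2, -x**2/8, x**3/16, ...  (a sketch; the n-th term is binomial(e, n)*x**n)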
def _sage_(self):
return self.args[0]._sage_()**self.args[1]._sage_()
def as_content_primitive(self, radical=False):
"""Return the tuple (R, self/R) where R is the positive Rational
extracted from self.
Examples
========
>>> from sympy import sqrt
>>> sqrt(4 + 4*sqrt(2)).as_content_primitive()
(2, sqrt(1 + sqrt(2)))
>>> sqrt(3 + 3*sqrt(2)).as_content_primitive()
(1, sqrt(3)*sqrt(1 + sqrt(2)))
>>> from sympy import expand_power_base, powsimp, Mul
>>> from sympy.abc import x, y
>>> ((2*x + 2)**2).as_content_primitive()
(4, (x + 1)**2)
>>> (4**((1 + y)/2)).as_content_primitive()
(2, 4**(y/2))
>>> (3**((1 + y)/2)).as_content_primitive()
(1, 3**((y + 1)/2))
>>> (3**((5 + y)/2)).as_content_primitive()
(9, 3**((y + 1)/2))
>>> eq = 3**(2 + 2*x)
>>> powsimp(eq) == eq
True
>>> eq.as_content_primitive()
(9, 3**(2*x))
>>> powsimp(Mul(*_))
3**(2*x + 2)
>>> eq = (2 + 2*x)**y
>>> s = expand_power_base(eq); s.is_Mul, s
(False, (2*x + 2)**y)
>>> eq.as_content_primitive()
(1, (2*(x + 1))**y)
>>> s = expand_power_base(_[1]); s.is_Mul, s
(True, 2**y*(x + 1)**y)
See docstring of Expr.as_content_primitive for more examples.
"""
b, e = self.as_base_exp()
b = _keep_coeff(*b.as_content_primitive(radical=radical))
ce, pe = e.as_content_primitive(radical=radical)
if b.is_Rational:
#e
#= ce*pe
#= ce*(h + t)
#= ce*h + ce*t
#=> self
#= b**(ce*h)*b**(ce*t)
#= b**(cehp/cehq)*b**(ce*t)
#= b**(iceh+r/cehq)*b**(ce*t)
#= b**(iceh)*b**(r/cehq)*b**(ce*t)
#= b**(iceh)*b**(ce*t + r/cehq)
h, t = pe.as_coeff_Add()
if h.is_Rational:
ceh = ce*h
c = self.func(b, ceh)
r = S.Zero
if not c.is_Rational:
iceh, r = divmod(ceh.p, ceh.q)
c = self.func(b, iceh)
return c, self.func(b, _keep_coeff(ce, t + r/ce/ceh.q))
e = _keep_coeff(ce, pe)
# b**e = (h*t)**e = h**e*t**e = c*m*t**e
if e.is_Rational and b.is_Mul:
h, t = b.as_content_primitive(radical=radical) # h is positive
c, m = self.func(h, e).as_coeff_Mul() # so c is positive
m, me = m.as_base_exp()
if m is S.One or me == e: # probably always true
# return the following, not return c, m*Pow(t, e)
# which would change Pow into Mul; we let sympy
# decide what to do by using the unevaluated Mul, e.g
# should it stay as sqrt(2 + 2*sqrt(5)) or become
# sqrt(2)*sqrt(1 + sqrt(5))
return c, self.func(_keep_coeff(m, t), e)
return S.One, self.func(b, e)
def is_constant(self, *wrt, **flags):
expr = self
if flags.get('simplify', True):
expr = expr.simplify()
b, e = expr.as_base_exp()
bz = b.equals(0)
if bz: # recalculate with assumptions in case it's unevaluated
new = b**e
if new != expr:
return new.is_constant()
econ = e.is_constant(*wrt)
bcon = b.is_constant(*wrt)
if bcon:
if econ:
return True
bz = b.equals(0)
if bz is False:
return False
elif bcon is None:
return None
return e.equals(0)
from .add import Add
from .numbers import Integer
from .mul import Mul, _keep_coeff
from .symbol import Symbol, Dummy, symbols
|
Laurawly/tvm-1 | refs/heads/master | tests/python/relay/test_pass_combine_parallel_batch_matmul.py | 4 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,too-many-locals,too-many-arguments,missing-module-docstring
import tvm
from tvm import relay
from tvm.relay import transform
def run_opt_pass(expr, opt_pass):
"runs the opt_pass on the expr of a function the function"
assert isinstance(opt_pass, tvm.transform.Pass)
mod = tvm.IRModule.from_expr(expr)
mod = tvm.relay.transform.InferType()(mod)
mod = opt_pass(mod)
return mod["main"]
def test_combine_parallel_batch_matmul():
"""Simple testcase."""
def before(x, w1, w2, w3):
args = [x, w1, w2, w3]
y1 = relay.nn.batch_matmul(x, w1)
y2 = relay.nn.batch_matmul(x, w2)
y3 = relay.nn.batch_matmul(x, w3)
y = relay.Tuple((y1, y2, y3))
return relay.Function(args, y)
def expected(x, w1, w2, w3):
# use a fixed order of args so alpha equal check can pass
s1 = w1.type_annotation.shape[1]
s2 = w2.type_annotation.shape[1]
s3 = w3.type_annotation.shape[1]
args = [x, w1, w2, w3]
w = relay.concatenate((w1, w2, w3), axis=1)
y = relay.nn.batch_matmul(x, w)
y1 = relay.strided_slice(
y, begin=[0, 0, 0], end=[-1, -1, s1], strides=[1, 1, 1], slice_mode="size"
)
y2 = relay.strided_slice(
y, begin=[0, 0, s1], end=[-1, -1, s2], strides=[1, 1, 1], slice_mode="size"
)
y3 = relay.strided_slice(
y, begin=[0, 0, s1 + s2], end=[-1, -1, s3], strides=[1, 1, 1], slice_mode="size"
)
y = relay.Tuple((y1, y2, y3))
return relay.Function(args, y)
def check(b, i, j, k):
x = relay.var("x", shape=(b, i, k))
w1 = relay.var("w1", shape=(b, j, k))
w2 = relay.var("w2", shape=(b, j, k))
w3 = relay.var("w3", shape=(b, j, k))
y_before = before(x, w1, w2, w3)
y = run_opt_pass(y_before, transform.CombineParallelBatchMatmul(min_num_branches=2))
y_expected = expected(x, w1, w2, w3)
y_expected = run_opt_pass(y_expected, transform.InferType())
tvm.ir.assert_structural_equal(y, y_expected, map_free_vars=True)
check(2, 3, 5, 4)
check(1, 100, 200, 300)
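# A quick numpy sketch (not part of the test suite) of why the combine is
# sound: relay.nn.batch_matmul(x, w) here computes x @ w.transpose(0, 2, 1),
# so concatenating w1..w3 along their j-axis simply concatenates the three
# results along the output's last axis, which the strided slices then undo.
def _numpy_equivalence_sketch(b=2, i=3, j=5, k=4):
    import numpy as np
    x = np.random.rand(b, i, k)
    ws = [np.random.rand(b, j, k) for _ in range(3)]
    combined = x @ np.concatenate(ws, axis=1).transpose(0, 2, 1)
    for idx, w in enumerate(ws):
        np.testing.assert_allclose(
            combined[:, :, idx * j:(idx + 1) * j], x @ w.transpose(0, 2, 1))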
def test_combine_parallel_batch_matmul_biasadd():
"""Simple testcase with bias"""
def before(x, w1, w2, w3, b1, b2, b3):
args = [x, w1, w2, w3, b1, b2, b3]
y1 = relay.nn.batch_matmul(x, w1)
y2 = relay.nn.batch_matmul(x, w2)
y3 = relay.nn.batch_matmul(x, w3)
y1 = relay.add(y1, b1)
y2 = relay.add(y2, b2)
y3 = relay.add(y3, b3)
y = relay.Tuple((y1, y2, y3))
return relay.Function(args, y)
def expected(x, w1, w2, w3, b1, b2, b3):
# use a fixed order of args so alpha equal check can pass
s1 = w1.type_annotation.shape[1]
s2 = w2.type_annotation.shape[1]
s3 = w3.type_annotation.shape[1]
args = [x, w1, w2, w3, b1, b2, b3]
w = relay.concatenate((w1, w2, w3), axis=1)
b = relay.concatenate((b1, b2, b3), axis=-1)
y = relay.nn.batch_matmul(x, w)
y = relay.add(y, b)
y1 = relay.strided_slice(
y, begin=[0, 0, 0], end=[-1, -1, s1], strides=[1, 1, 1], slice_mode="size"
)
y2 = relay.strided_slice(
y, begin=[0, 0, s1], end=[-1, -1, s2], strides=[1, 1, 1], slice_mode="size"
)
y3 = relay.strided_slice(
y, begin=[0, 0, s1 + s2], end=[-1, -1, s3], strides=[1, 1, 1], slice_mode="size"
)
y = relay.Tuple((y1, y2, y3))
return relay.Function(args, y)
def check(b, i, j, k):
x = relay.var("x", shape=(b, i, k))
w1 = relay.var("w1", shape=(b, j, k))
w2 = relay.var("w2", shape=(b, j, k))
w3 = relay.var("w3", shape=(b, j, k))
b1 = relay.var("b1", shape=(j,))
b2 = relay.var("b2", shape=(j,))
b3 = relay.var("b3", shape=(j,))
y_before = before(x, w1, w2, w3, b1, b2, b3)
y = run_opt_pass(y_before, transform.CombineParallelBatchMatmul(min_num_branches=2))
y_expected = expected(x, w1, w2, w3, b1, b2, b3)
y_expected = run_opt_pass(y_expected, transform.InferType())
tvm.ir.assert_structural_equal(y, y_expected, map_free_vars=True)
check(2, 3, 5, 4)
check(1, 100, 200, 300)
if __name__ == "__main__":
test_combine_parallel_batch_matmul()
test_combine_parallel_batch_matmul_biasadd()
|
fitermay/intellij-community | refs/heads/master | python/testData/pyi/inspections/overloads/m1.py | 57 | class C(object):
def __getitem__(self, key):
return f(key)
def __add__(self, other):
return f(other)
def f(key):
return key
def g(x):
pass
class Gen(object):
def __init__(self, x):
self.x = x
def get(self, x, y):
return self.x
|
BryanCutler/spark | refs/heads/master | python/pyspark/sql/tests/test_pandas_udf.py | 22 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pyspark.sql.functions import udf, pandas_udf, PandasUDFType
from pyspark.sql.types import DoubleType, StructType, StructField, LongType
from pyspark.sql.utils import ParseException, PythonException
from pyspark.rdd import PythonEvalType
from pyspark.testing.sqlutils import ReusedSQLTestCase, have_pandas, have_pyarrow, \
pandas_requirement_message, pyarrow_requirement_message
from pyspark.testing.utils import QuietTest
@unittest.skipIf(
not have_pandas or not have_pyarrow,
pandas_requirement_message or pyarrow_requirement_message) # type: ignore[arg-type]
class PandasUDFTests(ReusedSQLTestCase):
def test_pandas_udf_basic(self):
udf = pandas_udf(lambda x: x, DoubleType())
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, DoubleType(), PandasUDFType.SCALAR)
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'double', PandasUDFType.SCALAR)
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, StructType([StructField("v", DoubleType())]),
PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'v double', PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'v double',
functionType=PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, returnType='v double',
functionType=PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
def test_pandas_udf_decorator(self):
@pandas_udf(DoubleType())
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
@pandas_udf(returnType=DoubleType())
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
schema = StructType([StructField("v", DoubleType())])
@pandas_udf(schema, PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf('v double', PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf(schema, functionType=PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf(returnType='double', functionType=PandasUDFType.SCALAR)
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
@pandas_udf(returnType=schema, functionType=PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
def test_udf_wrong_arg(self):
with QuietTest(self.sc):
with self.assertRaises(ParseException):
@pandas_udf('blah')
def foo(x):
return x
with self.assertRaisesRegex(ValueError, 'Invalid return type.*None'):
@pandas_udf(functionType=PandasUDFType.SCALAR)
def foo(x):
return x
with self.assertRaisesRegex(ValueError, 'Invalid function'):
@pandas_udf('double', 100)
def foo(x):
return x
with self.assertRaisesRegex(ValueError, '0-arg pandas_udfs.*not.*supported'):
pandas_udf(lambda: 1, LongType(), PandasUDFType.SCALAR)
with self.assertRaisesRegex(ValueError, '0-arg pandas_udfs.*not.*supported'):
@pandas_udf(LongType(), PandasUDFType.SCALAR)
def zero_with_type():
return 1
with self.assertRaisesRegex(TypeError, 'Invalid return type'):
@pandas_udf(returnType=PandasUDFType.GROUPED_MAP)
def foo(df):
return df
with self.assertRaisesRegex(TypeError, 'Invalid return type'):
@pandas_udf(returnType='double', functionType=PandasUDFType.GROUPED_MAP)
def foo(df):
return df
with self.assertRaisesRegex(ValueError, 'Invalid function'):
@pandas_udf(returnType='k int, v double', functionType=PandasUDFType.GROUPED_MAP)
def foo(k, v, w):
return k
def test_stopiteration_in_udf(self):
def foo(x):
raise StopIteration()
def foofoo(x, y):
raise StopIteration()
exc_message = "Caught StopIteration thrown from user's code; failing the task"
df = self.spark.range(0, 100)
# plain udf (test for SPARK-23754)
self.assertRaisesRegex(
PythonException,
exc_message,
df.withColumn('v', udf(foo)('id')).collect
)
# pandas scalar udf
self.assertRaisesRegex(
PythonException,
exc_message,
df.withColumn(
'v', pandas_udf(foo, 'double', PandasUDFType.SCALAR)('id')
).collect
)
# pandas grouped map
self.assertRaisesRegex(
PythonException,
exc_message,
df.groupBy('id').apply(
pandas_udf(foo, df.schema, PandasUDFType.GROUPED_MAP)
).collect
)
self.assertRaisesRegex(
PythonException,
exc_message,
df.groupBy('id').apply(
pandas_udf(foofoo, df.schema, PandasUDFType.GROUPED_MAP)
).collect
)
# pandas grouped agg
self.assertRaisesRegex(
PythonException,
exc_message,
df.groupBy('id').agg(
pandas_udf(foo, 'double', PandasUDFType.GROUPED_AGG)('id')
).collect
)
def test_pandas_udf_detect_unsafe_type_conversion(self):
import pandas as pd
import numpy as np
values = [1.0] * 3
pdf = pd.DataFrame({'A': values})
df = self.spark.createDataFrame(pdf).repartition(1)
@pandas_udf(returnType="int")
def udf(column):
return pd.Series(np.linspace(0, 1, len(column)))
# Since 0.11.0, PyArrow supports the feature to raise an error for unsafe cast.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": True}):
with self.assertRaisesRegex(Exception,
"Exception thrown when converting pandas.Series"):
df.select(['A']).withColumn('udf', udf('A')).collect()
# Disabling Arrow safe type check.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": False}):
df.select(['A']).withColumn('udf', udf('A')).collect()
def test_pandas_udf_arrow_overflow(self):
import pandas as pd
df = self.spark.range(0, 1)
@pandas_udf(returnType="byte")
def udf(column):
return pd.Series([128] * len(column))
# When enabling safe type check, Arrow 0.11.0+ disallows overflow cast.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": True}):
with self.assertRaisesRegex(Exception,
"Exception thrown when converting pandas.Series"):
df.withColumn('udf', udf('id')).collect()
# Disabling safe type check, let Arrow do the cast anyway.
with self.sql_conf({"spark.sql.execution.pandas.convertToArrowArraySafely": False}):
df.withColumn('udf', udf('id')).collect()
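# A standalone sketch (assumes only numpy and pyarrow are installed) of the
# safe-cast behavior the two tests above exercise: with safe=True Arrow
# rejects values that do not fit the target type instead of silently
# truncating them.
def _arrow_safe_cast_sketch():
    import numpy as np
    import pyarrow as pa
    data = np.array([128], dtype=np.int16)
    try:
        pa.array(data, type=pa.int8(), safe=True)  # 128 does not fit int8
    except pa.ArrowInvalid as exc:
        print("safe cast rejected:", exc)
    # safe=False skips the range check (the value wraps, e.g. to -128).
    print(pa.array(data, type=pa.int8(), safe=False))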
if __name__ == "__main__":
from pyspark.sql.tests.test_pandas_udf import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
InAnimaTe/CouchPotatoServer | refs/heads/master | libs/pyutil/PickleSaver.py | 106 | # Copyright (c) 2001 Autonomous Zone Industries
# Copyright (c) 2002-2009 Zooko Wilcox-O'Hearn
# This file is part of pyutil; see README.rst for licensing terms.
"""
An object that makes some of the attributes of your class persistent, pickling
them and lazily writing them to a file.
"""
# from the Python Standard Library
import os
import cPickle as pickle
import warnings
# from the pyutil library
import fileutil
import nummedobj
import twistedutil
# from the Twisted library
from twisted.python import log
class PickleSaver(nummedobj.NummedObj):
"""
This makes some of the attributes of your class persistent, saving
them in a pickle and saving them lazily.
The general idea: You are going to tell PickleSaver which of your
attributes ought to be persistently saved, and the name of a file to
save them in. Those attributes will get saved to disk, and when
your object is instantiated those attributes will get set to the
values loaded from the file.
Usage: inherit from PickleSaver and call PickleSaver.__init__() in your
constructor. You will pass arguments to PickleSaver.__init__()
telling it which attributes to save, which file to save them in, and
what values they should have if there is no value stored for them in
the file.
Note: do *not* assign values to your persistent attributes in your
constructor, because you might thus overwrite their persistent
values.
Then whenever you change one of the persistent attributes, call
self.lazy_save() (it won't *really* save -- it'll just schedule a
save for DELAY seconds later.) If you update an attribute and
forget to call self.lazy_save() then the change will not be saved,
unless you later call self.lazy_save() before you shut down.
Data could be lost if the Python interpreter were to die
unexpectedly (for example, due to a segfault in a compiled machine
code module or due to the Python process being killed without
warning via SIGKILL) before the delay passes. However if the Python
interpreter shuts down cleanly (i.e., if it garbage collects and
invokes the __del__ methods of the collected objects), then the data
will be saved at that time (unless your class has the "not-collectable"
problem: http://python.org/doc/current/lib/module-gc.html -- search
in text for "uncollectable").
Note: you can pass DELAY=0 to make PickleSaver a not-so-lazy saver.
The advantage of laziness is that you don't touch the disk as
often -- touching disk is a performance cost.
To cleanly shutdown, invoke shutdown(). Further operations after that
will result in exceptions.
"""
class ExtRes:
"""
This is for holding things (external resources) that PickleSaver needs
to finalize after PickleSaver is killed. (post-mortem finalization)
In particular, this holds the names and values of all attributes
that have been changed, so that after the PickleSaver is
garbage-collected those values will be saved to the persistent file.
"""
def __init__(self, fname, objname):
self.fname = fname
self.objname = objname
self.dirty = False # True iff the attrs have been changed and need to be saved to disk; when this flag goes from False to True, a save task is scheduled for DELAY seconds later, and when that task runs it sets the flag back to False.
self.savertask = None
self.valstr = None # the pickled (serialized, string) contents of the attributes that should be saved
def _save_to_disk(self):
if self.valstr is not None:
log.msg("%s._save_to_disk(): fname: %s" % (self.objname, self.fname,))
of = open(self.fname + ".tmp", "wb")
of.write(self.valstr)
of.flush()
of.close()
of = None
fileutil.remove_if_possible(self.fname)
fileutil.rename(self.fname + ".tmp", self.fname)
log.msg("%s._save_to_disk(): now, having finished write(), os.path.isfile(%s): %s" % (self, self.fname, os.path.isfile(self.fname),))
self.valstr = None
self.dirty = False
try:
self.savertask.callId.cancel()
except:
pass
self.savertask = None
def shutdown(self):
if self.dirty:
self._save_to_disk()
if self.savertask:
try:
self.savertask.callId.cancel()
except:
pass
self.savertask = None
def __del__(self):
self.shutdown()
def __init__(self, fname, attrs, DELAY=60*60, savecb=None):
"""
@param attrs: a dict whose keys are the names of all the attributes to be persistently stored and whose values are the initial default value that the attribute gets set to the first time it is ever used; After this first initialization, the value will be persistent so the initial default value will never be used again.
@param savecb: if not None, then it is a callable that will be called after each save completes (useful for unit tests) (savecb doesn't get called after a shutdown-save, only after a scheduled save)
"""
warnings.warn("deprecated", DeprecationWarning)
nummedobj.NummedObj.__init__(self)
self._DELAY = DELAY
self._attrnames = attrs.keys()
self._extres = PickleSaver.ExtRes(fname=fname, objname=self.__repr__())
self._savecb = savecb
for attrname, defaultval in attrs.items():
setattr(self, attrname, defaultval)
try:
attrdict = pickle.loads(open(self._extres.fname, "rb").read())
for attrname, attrval in attrdict.items():
if not hasattr(self, attrname):
log.msg("WARNING: %s has no attribute named %s on load from disk, value: %s." % (self, attrname, attrval,))
setattr(self, attrname, attrval)
except (pickle.UnpicklingError, IOError, EOFError,), le:
try:
attrdict = pickle.loads(open(self._extres.fname + ".tmp", "rb").read())
for attrname, attrval in attrdict.items():
if not hasattr(self, attrname):
log.msg("WARNING: %s has no attribute named %s on load from disk, value: %s." % (self, attrname, attrval,))
setattr(self, attrname, attrval)
except (pickle.UnpicklingError, IOError, EOFError,), le2:
log.msg("Got exception attempting to load attrs. (This is normal if this is the first time you've used this persistent %s object.) fname: %s, le: %s, le2: %s" % (self.__class__, self._extres.fname, le, le2,))
self.lazy_save()
def _store_attrs_in_extres(self):
d = {}
for attrname in self._attrnames:
d[attrname] = getattr(self, attrname)
# log.msg("%s._store_attrs_in_extres: attrname: %s, val: %s" % (self, attrname, getattr(self, attrname),))
# pickle the attrs now, to ensure that there are no reference cycles
self._extres.valstr = pickle.dumps(d, True)
# log.msg("%s._store_attrs_in_extres: valstr: %s" % (self, self._extres.valstr,))
self._extres.dirty = True
def _save_to_disk(self):
log.msg("%s._save_to_disk()" % (self,))
self._extres._save_to_disk()
if self._savecb:
self._savecb()
def _lazy_save(self, delay=None):
""" @deprecated: use lazy_save() instead """
return self.lazy_save(delay)
def lazy_save(self, delay=None):
"""
@param delay: how long from now before the data gets saved to disk, or `None' in order to use the default value provided in the constructor
"""
if delay is None:
delay=self._DELAY
# copy the values into extres so that if `self' gets garbage-collected the values will be written to disk during post-mortem finalization. (This also marks it as dirty.)
self._store_attrs_in_extres()
newsavetask = twistedutil.callLater_weakly(delay, self._save_to_disk)
if self._extres.savertask:
if self._extres.savertask.callId.getTime() < newsavetask.callId.getTime():
try:
newsavetask.callId.cancel()
except:
pass
else:
try:
self._extres.savertask.callId.cancel()
except:
pass
self._extres.savertask = newsavetask
else:
self._extres.savertask = newsavetask
def shutdown(self):
self._extres.shutdown()
self._extres = None
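# A minimal usage sketch (hypothetical subclass; names are illustrative only):
class _ExampleCounter(PickleSaver):
    def __init__(self, fname):
        # 'count' defaults to 0 only the first time; afterwards the value
        # persisted in fname wins.
        PickleSaver.__init__(self, fname=fname, attrs={'count': 0}, DELAY=0)
    def increment(self):
        self.count += 1
        self.lazy_save()  # with DELAY=0 the save is scheduled immediately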
|
chinmaygarde/depot_tools | refs/heads/master | tests/gclient_smoketest.py | 25 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Smoke tests for gclient.py.
Shell out 'gclient' and run basic conformance tests.
This test assumes GClientSmokeBase.URL_BASE is valid.
"""
import logging
import os
import re
import subprocess
import sys
import unittest
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
import gclient_utils
import scm as gclient_scm
import subprocess2
from testing_support import fake_repos
from testing_support.fake_repos import join, write
GCLIENT_PATH = os.path.join(ROOT_DIR, 'gclient')
COVERAGE = False
class GClientSmokeBase(fake_repos.FakeReposTestBase):
def setUp(self):
super(GClientSmokeBase, self).setUp()
# Make sure it doesn't try to auto update when testing!
self.env = os.environ.copy()
self.env['DEPOT_TOOLS_UPDATE'] = '0'
def gclient(self, cmd, cwd=None):
if not cwd:
cwd = self.root_dir
if COVERAGE:
# Don't use the wrapper script.
cmd_base = ['coverage', 'run', '-a', GCLIENT_PATH + '.py']
else:
cmd_base = [GCLIENT_PATH]
cmd = cmd_base + cmd
process = subprocess.Popen(cmd, cwd=cwd, env=self.env,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=sys.platform.startswith('win'))
(stdout, stderr) = process.communicate()
logging.debug("XXX: %s\n%s\nXXX" % (' '.join(cmd), stdout))
logging.debug("YYY: %s\n%s\nYYY" % (' '.join(cmd), stderr))
# pylint: disable=E1103
return (stdout.replace('\r\n', '\n'), stderr.replace('\r\n', '\n'),
process.returncode)
def untangle(self, stdout):
tasks = {}
remaining = []
for line in stdout.splitlines(False):
m = re.match(r'^(\d)+>(.*)$', line)
if not m:
remaining.append(line)
else:
self.assertEquals([], remaining)
tasks.setdefault(int(m.group(1)), []).append(m.group(2))
out = []
for key in sorted(tasks.iterkeys()):
out.extend(tasks[key])
out.extend(remaining)
return '\n'.join(out)
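# e.g. untangle("1>a\n2>x\n1>b") -> "a\nb\nx": lines are regrouped by their
# numeric task prefix so parallel-job output can be checked deterministically.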
def parseGclient(self, cmd, items, expected_stderr='', untangle=False):
"""Parse gclient's output to make it easier to test.
If untangle is True, tries to sort out the output from parallel checkout."""
(stdout, stderr, returncode) = self.gclient(cmd)
if untangle:
stdout = self.untangle(stdout)
self.checkString(expected_stderr, stderr)
self.assertEquals(0, returncode)
return self.checkBlock(stdout, items)
def splitBlock(self, stdout):
"""Split gclient's output into logical execution blocks.
___ running 'foo' at '/bar'
(...)
___ running 'baz' at '/bar'
(...)
will result in 2 items of len((...).splitlines()) each.
"""
results = []
for line in stdout.splitlines(False):
# Intentionally skips empty lines.
if not line:
continue
if line.startswith('__'):
match = re.match(r'^________ ([a-z]+) \'(.*)\' in \'(.*)\'$', line)
if not match:
match = re.match(r'^_____ (.*) is missing, synching instead$', line)
if match:
# Blah, it's when a dependency is deleted, we should probably not
# output this message.
results.append([line])
elif (
not re.match(
r'_____ [^ ]+ : Attempting rebase onto [0-9a-f]+...',
line) and
not re.match(r'_____ [^ ]+ at [^ ]+', line)):
# The two regexp above are a bit too broad, they are necessary only
# for git checkouts.
self.fail(line)
else:
results.append([[match.group(1), match.group(2), match.group(3)]])
else:
if not results:
# TODO(maruel): gclient's git stdout is inconsistent.
# This should fail the test instead!!
pass
else:
results[-1].append(line)
return results
def checkBlock(self, stdout, items):
results = self.splitBlock(stdout)
for i in xrange(min(len(results), len(items))):
if isinstance(items[i], (list, tuple)):
verb = items[i][0]
path = items[i][1]
else:
verb = items[i]
path = self.root_dir
self.checkString(results[i][0][0], verb, (i, results[i][0][0], verb))
if sys.platform == 'win32':
# Make path lower case since casing can change randomly.
self.checkString(
results[i][0][2].lower(),
path.lower(),
(i, results[i][0][2].lower(), path.lower()))
else:
self.checkString(results[i][0][2], path, (i, results[i][0][2], path))
self.assertEquals(len(results), len(items), (stdout, items, len(results)))
return results
@staticmethod
def svnBlockCleanup(out):
"""Work around svn status difference between svn 1.5 and svn 1.6
I don't know why but on Windows they are reversed. So sorts the items."""
for i in xrange(len(out)):
if len(out[i]) < 2:
continue
out[i] = [out[i][0]] + sorted([x[1:].strip() for x in out[i][1:]])
return out
class GClientSmoke(GClientSmokeBase):
"""Doesn't require either svnserve nor git-daemon."""
@property
def svn_base(self):
return 'svn://random.server/svn/'
@property
def git_base(self):
return 'git://random.server/git/'
def testHelp(self):
"""testHelp: make sure no new command was added."""
result = self.gclient(['help'])
# Roughly, not too short, not too long.
self.assertTrue(1000 < len(result[0]) and len(result[0]) < 2300,
'Too much written to stdout: %d bytes' % len(result[0]))
self.assertEquals(0, len(result[1]))
self.assertEquals(0, result[2])
def testUnknown(self):
result = self.gclient(['foo'])
# Roughly, not too short, not too long.
self.assertTrue(1000 < len(result[0]) and len(result[0]) < 2300,
'Too much written to stdout: %d bytes' % len(result[0]))
self.assertEquals(0, len(result[1]))
self.assertEquals(0, result[2])
def testNotConfigured(self):
res = ('', 'Error: client not configured; see \'gclient config\'\n', 1)
self.check(res, self.gclient(['cleanup']))
self.check(res, self.gclient(['diff']))
self.check(res, self.gclient(['pack']))
self.check(res, self.gclient(['revert']))
self.check(res, self.gclient(['revinfo']))
self.check(res, self.gclient(['runhooks']))
self.check(res, self.gclient(['status']))
self.check(res, self.gclient(['sync']))
self.check(res, self.gclient(['update']))
def testConfig(self):
p = join(self.root_dir, '.gclient')
def test(cmd, expected):
if os.path.exists(p):
os.remove(p)
results = self.gclient(cmd)
self.check(('', '', 0), results)
self.checkString(expected, open(p, 'rU').read())
test(['config', self.svn_base + 'trunk/src/'],
('solutions = [\n'
' { "name" : "src",\n'
' "url" : "%strunk/src",\n'
' "deps_file" : "DEPS",\n'
' "managed" : True,\n'
' "custom_deps" : {\n'
' },\n'
' "safesync_url": "",\n'
' },\n'
']\n'
'cache_dir = None\n') % self.svn_base)
test(['config', self.git_base + 'repo_1', '--name', 'src'],
('solutions = [\n'
' { "name" : "src",\n'
' "url" : "%srepo_1",\n'
' "deps_file" : "DEPS",\n'
' "managed" : True,\n'
' "custom_deps" : {\n'
' },\n'
' "safesync_url": "",\n'
' },\n'
']\n'
'cache_dir = None\n') % self.git_base)
test(['config', 'foo', 'faa'],
'solutions = [\n'
' { "name" : "foo",\n'
' "url" : "foo",\n'
' "deps_file" : "DEPS",\n'
' "managed" : True,\n'
' "custom_deps" : {\n'
' },\n'
' "safesync_url": "faa",\n'
' },\n'
']\n'
'cache_dir = None\n')
test(['config', 'foo', '--deps', 'blah'],
'solutions = [\n'
' { "name" : "foo",\n'
' "url" : "foo",\n'
' "deps_file" : "blah",\n'
' "managed" : True,\n'
' "custom_deps" : {\n'
' },\n'
' "safesync_url": "",\n'
' },\n'
']\n'
'cache_dir = None\n')
test(['config', '--spec', '["blah blah"]'], '["blah blah"]')
os.remove(p)
results = self.gclient(['config', 'foo', 'faa', 'fuu'])
err = ('Usage: gclient.py config [options] [url] [safesync url]\n\n'
'gclient.py: error: Inconsistent arguments. Use either --spec or one'
' or 2 args\n')
self.check(('', err, 2), results)
self.assertFalse(os.path.exists(join(self.root_dir, '.gclient')))
def testSolutionNone(self):
results = self.gclient(['config', '--spec',
'solutions=[{"name": "./", "url": None}]'])
self.check(('', '', 0), results)
results = self.gclient(['sync'])
self.check(('', '', 0), results)
self.assertTree({})
results = self.gclient(['revinfo'])
self.check(('./: None\n', '', 0), results)
self.check(('', '', 0), self.gclient(['cleanup']))
self.check(('', '', 0), self.gclient(['diff']))
self.assertTree({})
self.check(('', '', 0), self.gclient(['pack']))
self.check(('', '', 0), self.gclient(['revert']))
self.assertTree({})
self.check(('', '', 0), self.gclient(['runhooks']))
self.assertTree({})
self.check(('', '', 0), self.gclient(['status']))
def testDifferentTopLevelDirectory(self):
# Check that even if the .gclient file does not mention the directory src
# itself, but it is included via dependencies, the .gclient file is used.
self.gclient(['config', self.svn_base + 'trunk/src.DEPS'])
deps = join(self.root_dir, 'src.DEPS')
os.mkdir(deps)
write(join(deps, 'DEPS'),
'deps = { "src": "%strunk/src" }' % (self.svn_base))
src = join(self.root_dir, 'src')
os.mkdir(src)
res = self.gclient(['status', '--jobs', '1'], src)
self.checkBlock(res[0], [('running', deps), ('running', src)])
class GClientSmokeGIT(GClientSmokeBase):
def setUp(self):
super(GClientSmokeGIT, self).setUp()
self.enabled = self.FAKE_REPOS.set_up_git()
def testSync(self):
if not self.enabled:
return
# TODO(maruel): safesync.
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
# Test unversioned checkout.
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '1'],
['running', 'running'])
# TODO(maruel): http://crosbug.com/3582 hooks run even if not matching, must
# add sync parsing to get the list of updated files.
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Manually remove git_hooked1 before synching to make sure it's not
# recreated.
os.remove(join(self.root_dir, 'src', 'git_hooked1'))
# Test incremental versioned sync: sync backward.
self.parseGclient(
['sync', '--jobs', '1', '--revision',
'src@' + self.githash('repo_1', 1),
'--deps', 'mac', '--delete_unversioned_trees'],
['deleting'])
tree = self.mangle_git_tree(('repo_1@1', 'src'),
('repo_2@2', 'src/repo2'),
('repo_3@1', 'src/repo2/repo3'),
('repo_4@2', 'src/repo4'))
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Test incremental sync: delete-unversioned_trees isn't there.
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '1'],
['running', 'running'])
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@1', 'src/repo2/repo3'),
('repo_3@2', 'src/repo2/repo_renamed'),
('repo_4@2', 'src/repo4'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
def testSyncIgnoredSolutionName(self):
"""TODO(maruel): This will become an error soon."""
if not self.enabled:
return
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '1',
'--revision', 'invalid@' + self.githash('repo_1', 1)],
['running', 'running'],
'Please fix your script, having invalid --revision flags '
'will soon considered an error.\n')
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
def testSyncNoSolutionName(self):
if not self.enabled:
return
# When no solution name is provided, gclient uses the first solution listed.
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '1',
'--revision', self.githash('repo_1', 1)],
[])
tree = self.mangle_git_tree(('repo_1@1', 'src'),
('repo_2@2', 'src/repo2'),
('repo_3@1', 'src/repo2/repo3'),
('repo_4@2', 'src/repo4'))
self.assertTree(tree)
def testSyncJobs(self):
if not self.enabled:
return
# TODO(maruel): safesync.
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
# Test unversioned checkout.
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '8'],
['running', 'running'],
untangle=True)
# TODO(maruel): http://crosbug.com/3582 hooks run even if not matching, must
# add sync parsing to get the list of updated files.
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Manually remove git_hooked1 before synching to make sure it's not
# recreated.
os.remove(join(self.root_dir, 'src', 'git_hooked1'))
# Test incremental versioned sync: sync backward.
# Use --jobs 1 otherwise the order is not deterministic.
self.parseGclient(
['sync', '--revision', 'src@' + self.githash('repo_1', 1),
'--deps', 'mac', '--delete_unversioned_trees', '--jobs', '1'],
['deleting'],
untangle=True)
tree = self.mangle_git_tree(('repo_1@1', 'src'),
('repo_2@2', 'src/repo2'),
('repo_3@1', 'src/repo2/repo3'),
('repo_4@2', 'src/repo4'))
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Test incremental sync: delete-unversioned_trees isn't there.
self.parseGclient(
['sync', '--deps', 'mac', '--jobs', '8'],
['running', 'running'],
untangle=True)
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@1', 'src/repo2/repo3'),
('repo_3@2', 'src/repo2/repo_renamed'),
('repo_4@2', 'src/repo4'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
def testRunHooks(self):
if not self.enabled:
return
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
self.gclient(['sync', '--deps', 'mac'])
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
os.remove(join(self.root_dir, 'src', 'git_hooked1'))
os.remove(join(self.root_dir, 'src', 'git_hooked2'))
# runhooks runs all hooks even if not matching by design.
out = self.parseGclient(['runhooks', '--deps', 'mac'],
['running', 'running'])
self.assertEquals(1, len(out[0]))
self.assertEquals(1, len(out[1]))
tree = self.mangle_git_tree(('repo_1@2', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
def testPreDepsHooks(self):
if not self.enabled:
return
self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
expectation = [
('running', self.root_dir), # pre-deps hook
]
out = self.parseGclient(['sync', '--deps', 'mac', '--jobs=1',
'--revision', 'src@' + self.githash('repo_5', 2)],
expectation)
self.assertEquals(2, len(out[0]))
self.assertEquals('pre-deps hook', out[0][1])
tree = self.mangle_git_tree(('repo_5@2', 'src'),
('repo_1@2', 'src/repo1'),
('repo_2@1', 'src/repo2')
)
tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
self.assertTree(tree)
os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
# Pre-DEPS hooks don't run with runhooks.
self.gclient(['runhooks', '--deps', 'mac'])
tree = self.mangle_git_tree(('repo_5@2', 'src'),
('repo_1@2', 'src/repo1'),
('repo_2@1', 'src/repo2')
)
self.assertTree(tree)
# Pre-DEPS hooks run when syncing with --nohooks.
self.gclient(['sync', '--deps', 'mac', '--nohooks',
'--revision', 'src@' + self.githash('repo_5', 2)])
tree = self.mangle_git_tree(('repo_5@2', 'src'),
('repo_1@2', 'src/repo1'),
('repo_2@1', 'src/repo2')
)
tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
self.assertTree(tree)
os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
# Pre-DEPS hooks don't run with --noprehooks
self.gclient(['sync', '--deps', 'mac', '--noprehooks',
'--revision', 'src@' + self.githash('repo_5', 2)])
tree = self.mangle_git_tree(('repo_5@2', 'src'),
('repo_1@2', 'src/repo1'),
('repo_2@1', 'src/repo2')
)
self.assertTree(tree)
def testPreDepsHooksError(self):
if not self.enabled:
return
self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
expected_stdout = [
('running', self.root_dir), # pre-deps hook
('running', self.root_dir), # pre-deps hook (fails)
]
expected_stderr = ("Error: Command '/usr/bin/python -c import sys; "
"sys.exit(1)' returned non-zero exit status 1 in %s\n"
% self.root_dir)
stdout, stderr, retcode = self.gclient(['sync', '--deps', 'mac', '--jobs=1',
'--revision',
'src@' + self.githash('repo_5', 3)])
self.assertEquals(stderr, expected_stderr)
self.assertEquals(2, retcode)
self.checkBlock(stdout, expected_stdout)
def testRevInfo(self):
if not self.enabled:
return
self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
self.gclient(['sync', '--deps', 'mac'])
results = self.gclient(['revinfo', '--deps', 'mac'])
out = ('src: %(base)srepo_1\n'
'src/repo2: %(base)srepo_2@%(hash2)s\n'
'src/repo2/repo_renamed: %(base)srepo_3\n' %
{
'base': self.git_base,
'hash2': self.githash('repo_2', 1)[:7],
})
self.check((out, '', 0), results)
results = self.gclient(['revinfo', '--deps', 'mac', '--actual'])
out = ('src: %(base)srepo_1@%(hash1)s\n'
'src/repo2: %(base)srepo_2@%(hash2)s\n'
'src/repo2/repo_renamed: %(base)srepo_3@%(hash3)s\n' %
{
'base': self.git_base,
'hash1': self.githash('repo_1', 2),
'hash2': self.githash('repo_2', 1),
'hash3': self.githash('repo_3', 2),
})
self.check((out, '', 0), results)
class GClientSmokeGITMutates(GClientSmokeBase):
"""testRevertAndStatus mutates the git repo so move it to its own suite."""
def setUp(self):
super(GClientSmokeGITMutates, self).setUp()
self.enabled = self.FAKE_REPOS.set_up_git()
def testRevertAndStatus(self):
if not self.enabled:
return
# Commit new change to repo to make repo_2's hash use a custom_var.
cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
repo_2_hash = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
new_deps = cur_deps.replace('repo_2@%s\'' % repo_2_hash,
'repo_2@\' + Var(\'r2hash\')')
new_deps = 'vars = {\'r2hash\': \'%s\'}\n%s' % (repo_2_hash, new_deps)
self.FAKE_REPOS._commit_git('repo_1', { # pylint: disable=W0212
'DEPS': new_deps,
'origin': 'git/repo_1@3\n',
})
config_template = (
"""solutions = [{
"name" : "src",
"url" : "%(git_base)srepo_1",
"deps_file" : "DEPS",
"managed" : True,
"custom_vars" : %(custom_vars)s,
}]""")
self.gclient(['config', '--spec', config_template % {
'git_base': self.git_base,
'custom_vars': {}
}])
# Tested in testSync.
self.gclient(['sync', '--deps', 'mac'])
write(join(self.root_dir, 'src', 'repo2', 'hi'), 'Hey!')
out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'], [])
# TODO(maruel): http://crosbug.com/3584 It should output the unversioned
# files.
self.assertEquals(0, len(out))
# Revert implies --force implies running hooks without looking at pattern
# matching. For each expected path, 'git reset' and 'git clean' are run, so
# there should be two results for each. The last two results should reflect
# writing git_hooked1 and git_hooked2. There's only one result for the third
# because it is clean and has no output for 'git clean'.
out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
['running', 'running'])
self.assertEquals(2, len(out))
tree = self.mangle_git_tree(('repo_1@3', 'src'),
('repo_2@1', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Make a new commit object in the origin repo, to force reset to fetch.
self.FAKE_REPOS._commit_git('repo_2', { # pylint: disable=W0212
'origin': 'git/repo_2@3\n',
})
self.gclient(['config', '--spec', config_template % {
'git_base': self.git_base,
'custom_vars': {'r2hash': self.FAKE_REPOS.git_hashes['repo_2'][-1][0] }
}])
out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
['running', 'running'])
self.assertEquals(2, len(out))
tree = self.mangle_git_tree(('repo_1@3', 'src'),
('repo_2@3', 'src/repo2'),
('repo_3@2', 'src/repo2/repo_renamed'))
tree['src/git_hooked1'] = 'git_hooked1'
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
results = self.gclient(['status', '--deps', 'mac', '--jobs', '1'])
out = results[0].splitlines(False)
# TODO(maruel): http://crosbug.com/3584 It should output the unversioned
# files.
self.assertEquals(0, len(out))
def testSyncNoHistory(self):
if not self.enabled:
return
# Create an extra commit in repo_2 and point DEPS to its hash.
cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
repo_2_hash_old = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
self.FAKE_REPOS._commit_git('repo_2', { # pylint: disable=W0212
'last_file': 'file created in last commit',
})
repo_2_hash_new = self.FAKE_REPOS.git_hashes['repo_2'][-1][0]
new_deps = cur_deps.replace(repo_2_hash_old, repo_2_hash_new)
self.assertNotEqual(new_deps, cur_deps)
self.FAKE_REPOS._commit_git('repo_1', { # pylint: disable=W0212
'DEPS': new_deps,
'origin': 'git/repo_1@4\n',
})
config_template = (
"""solutions = [{
"name" : "src",
"url" : "%(git_base)srepo_1",
"deps_file" : "DEPS",
"managed" : True,
}]""")
self.gclient(['config', '--spec', config_template % {
'git_base': self.git_base
}])
self.gclient(['sync', '--no-history', '--deps', 'mac'])
repo2_root = join(self.root_dir, 'src', 'repo2')
# Check that repo_2 is actually shallow and its log has only one entry.
rev_lists = subprocess2.check_output(['git', 'rev-list', 'HEAD'],
cwd=repo2_root)
self.assertEquals(repo_2_hash_new, rev_lists.strip('\r\n'))
# Check that we have actually checked out the right commit.
self.assertTrue(os.path.exists(join(repo2_root, 'last_file')))
class SkiaDEPSTransitionSmokeTest(GClientSmokeBase):
"""Simulate the behavior of bisect bots as they transition across the Skia
DEPS change."""
FAKE_REPOS_CLASS = fake_repos.FakeRepoSkiaDEPS
def setUp(self):
super(SkiaDEPSTransitionSmokeTest, self).setUp()
self.enabled = self.FAKE_REPOS.set_up_git()
def testSkiaDEPSChangeGit(self):
if not self.enabled:
return
# Create an initial checkout:
# - Single checkout at the root.
# - Multiple checkouts in a shared subdirectory.
self.gclient(['config', '--spec',
'solutions=['
'{"name": "src",'
' "url": "' + self.git_base + 'repo_2",'
'}]'])
checkout_path = os.path.join(self.root_dir, 'src')
skia = os.path.join(checkout_path, 'third_party', 'skia')
skia_gyp = os.path.join(skia, 'gyp')
skia_include = os.path.join(skia, 'include')
skia_src = os.path.join(skia, 'src')
gyp_git_url = self.git_base + 'repo_3'
include_git_url = self.git_base + 'repo_4'
src_git_url = self.git_base + 'repo_5'
skia_git_url = self.FAKE_REPOS.git_base + 'repo_1'
pre_hash = self.githash('repo_2', 1)
post_hash = self.githash('repo_2', 2)
# Initial sync. Verify that we get the expected checkout.
res = self.gclient(['sync', '--deps', 'mac', '--revision',
'src@%s' % pre_hash])
self.assertEqual(res[2], 0, 'Initial sync failed.')
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_gyp), gyp_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_include), include_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_src), src_git_url)
# Verify that the sync succeeds. Verify that we have the expected merged
# checkout.
res = self.gclient(['sync', '--deps', 'mac', '--revision',
'src@%s' % post_hash])
self.assertEqual(res[2], 0, 'DEPS change sync failed.')
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia), skia_git_url)
# Sync again. Verify that we still have the expected merged checkout.
res = self.gclient(['sync', '--deps', 'mac', '--revision',
'src@%s' % post_hash])
self.assertEqual(res[2], 0, 'Subsequent sync failed.')
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia), skia_git_url)
# Sync back to the original DEPS. Verify that we get the original structure.
res = self.gclient(['sync', '--deps', 'mac', '--revision',
'src@%s' % pre_hash])
self.assertEqual(res[2], 0, 'Reverse sync failed.')
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_gyp), gyp_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_include), include_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_src), src_git_url)
# Sync again. Verify that we still have the original structure.
res = self.gclient(['sync', '--deps', 'mac', '--revision',
'src@%s' % pre_hash])
self.assertEqual(res[2], 0, 'Subsequent sync #2 failed.')
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_gyp), gyp_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_include), include_git_url)
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
skia_src), src_git_url)
class BlinkDEPSTransitionSmokeTest(GClientSmokeBase):
"""Simulate the behavior of bisect bots as they transition across the Blink
DEPS change."""
FAKE_REPOS_CLASS = fake_repos.FakeRepoBlinkDEPS
def setUp(self):
super(BlinkDEPSTransitionSmokeTest, self).setUp()
self.enabled = self.FAKE_REPOS.set_up_git()
self.checkout_path = os.path.join(self.root_dir, 'src')
self.blink = os.path.join(self.checkout_path, 'third_party', 'WebKit')
self.blink_git_url = self.FAKE_REPOS.git_base + 'repo_2'
self.pre_merge_sha = self.githash('repo_1', 1)
self.post_merge_sha = self.githash('repo_1', 2)
def CheckStatusPreMergePoint(self):
self.assertEqual(gclient_scm.GIT.Capture(['config', 'remote.origin.url'],
self.blink), self.blink_git_url)
self.assertTrue(os.path.exists(join(self.blink, '.git')))
self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
with open(join(self.blink, 'OWNERS')) as f:
owners_content = f.read()
self.assertEqual('OWNERS-pre', owners_content, 'OWNERS not updated')
self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
self.assertTrue(os.path.exists(
join(self.blink, 'Source', 'exists_before_but_not_after')))
self.assertFalse(os.path.exists(
join(self.blink, 'Source', 'exists_after_but_not_before')))
def CheckStatusPostMergePoint(self):
# Check that the contents still exist
self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
with open(join(self.blink, 'OWNERS')) as f:
owners_content = f.read()
self.assertEqual('OWNERS-post', owners_content, 'OWNERS not updated')
self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
# Check that files removed across the merge point are actually deleted.
self.assertTrue(os.path.exists(
join(self.blink, 'Source', 'exists_after_but_not_before')))
self.assertFalse(os.path.exists(
join(self.blink, 'Source', 'exists_before_but_not_after')))
# But not the .git folder
self.assertFalse(os.path.exists(join(self.blink, '.git')))
@unittest.skip('flaky')
def testBlinkDEPSChangeUsingGclient(self):
"""Checks that {src,blink} repos are consistent when syncing going back and
forth using gclient sync src@revision."""
if not self.enabled:
return
self.gclient(['config', '--spec',
'solutions=['
'{"name": "src",'
' "url": "' + self.git_base + 'repo_1",'
'}]'])
# Go back and forth two times.
for _ in xrange(2):
res = self.gclient(['sync', '--jobs', '1',
'--revision', 'src@%s' % self.pre_merge_sha])
self.assertEqual(res[2], 0, 'DEPS change sync failed.')
self.CheckStatusPreMergePoint()
res = self.gclient(['sync', '--jobs', '1',
'--revision', 'src@%s' % self.post_merge_sha])
self.assertEqual(res[2], 0, 'DEPS change sync failed.')
self.CheckStatusPostMergePoint()
@unittest.skip('flaky')
def testBlinkDEPSChangeUsingGit(self):
"""Like testBlinkDEPSChangeUsingGclient, but move the main project using
directly git and not gclient sync."""
if not self.enabled:
return
self.gclient(['config', '--spec',
'solutions=['
'{"name": "src",'
' "url": "' + self.git_base + 'repo_1",'
' "managed": False,'
'}]'])
# Perform an initial sync to bootstrap the repo.
res = self.gclient(['sync', '--jobs', '1'])
self.assertEqual(res[2], 0, 'Initial gclient sync failed.')
# Go back and forth two times.
for _ in xrange(2):
subprocess2.check_call(['git', 'checkout', '-q', self.pre_merge_sha],
cwd=self.checkout_path)
res = self.gclient(['sync', '--jobs', '1'])
self.assertEqual(res[2], 0, 'gclient sync failed.')
self.CheckStatusPreMergePoint()
subprocess2.check_call(['git', 'checkout', '-q', self.post_merge_sha],
cwd=self.checkout_path)
res = self.gclient(['sync', '--jobs', '1'])
self.assertEqual(res[2], 0, 'DEPS change sync failed.')
self.CheckStatusPostMergePoint()
@unittest.skip('flaky')
def testBlinkLocalBranchesArePreserved(self):
"""Checks that the state of local git branches are effectively preserved
when going back and forth."""
if not self.enabled:
return
self.gclient(['config', '--spec',
'solutions=['
'{"name": "src",'
' "url": "' + self.git_base + 'repo_1",'
'}]'])
# Initialize to pre-merge point.
self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
self.CheckStatusPreMergePoint()
# Create a branch named "foo".
subprocess2.check_call(['git', 'checkout', '-qB', 'foo'],
cwd=self.blink)
# Cross the pre-merge point.
self.gclient(['sync', '--revision', 'src@%s' % self.post_merge_sha])
self.CheckStatusPostMergePoint()
# Go backwards and check that we still have the foo branch.
self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
self.CheckStatusPreMergePoint()
subprocess2.check_call(
['git', 'show-ref', '-q', '--verify', 'refs/heads/foo'], cwd=self.blink)
if __name__ == '__main__':
if '-v' in sys.argv:
logging.basicConfig(level=logging.DEBUG)
if '-c' in sys.argv:
COVERAGE = True
sys.argv.remove('-c')
if os.path.exists('.coverage'):
os.remove('.coverage')
os.environ['COVERAGE_FILE'] = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'.coverage')
unittest.main()
|
ActiveState/code | refs/heads/master | recipes/Python/52201_Memoizing_cacheing_functireturn/recipe-52201.py | 1 | # Functions can be memoised "by hand" using a dictionary to hold
# the return values when they are calculated:
# Here is a simple case, using the recursive fibonnaci function
# f(n) = f(n-1) + f(n-2)
fib_memo = {}
def fib(n):
if n < 2: return 1
if not fib_memo.has_key(n):
fib_memo[n] = fib(n-1) + fib(n-2)
return fib_memo[n]
# To encapsulate this in a class, use the Memoize class:
class Memoize:
"""Memoize(fn) - an instance which acts like fn but memoizes its arguments
Will only work on functions with non-mutable arguments
"""
def __init__(self, fn):
self.fn = fn
self.memo = {}
def __call__(self, *args):
if not self.memo.has_key(args):
self.memo[args] = self.fn(*args)
return self.memo[args]
# And here is how to use this class to memoize fib(). Note that the definition
# for fib() is now the "obvious" one, without the cacheing code obscuring
# the algorithm.
def fib(n):
if n < 2: return 1
return fib(n-1) + fib(n-2)
fib = Memoize(fib)
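# Because the name fib is rebound to the Memoize instance, the recursive
# calls inside fib() also go through the cache, so fib(n) now takes O(n)
# calls instead of exponentially many.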
# For functions taking mutable arguments, use the cPickle module, as
# in class MemoizeMutable:
class MemoizeMutable:
"""Memoize(fn) - an instance which acts like fn but memoizes its arguments
Will work on functions with mutable arguments (slower than Memoize)
"""
def __init__(self, fn):
self.fn = fn
self.memo = {}
def __call__(self, *args):
import cPickle
str = cPickle.dumps(args)
if not self.memo.has_key(str):
self.memo[str] = self.fn(*args)
return self.memo[str]
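# Example usage with a mutable (list) argument -- a quick sketch:
def _memoize_mutable_demo():
    def total(values):
        print "computing", values
        return sum(values)
    total = MemoizeMutable(total)
    total([1, 2, 3]) # computes and caches (the pickled args are the key)
    total([1, 2, 3]) # cache hit: no "computing" line is printed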
|
ulif/pulp | refs/heads/master | common/test/unit/test_common_config_validation.py | 13 | import re
import unittest
from mock import patch
from StringIO import StringIO
from pulp.common.config import (ANY, BOOL, Config, NUMBER, OPTIONAL, parse_bool, read_json_config,
REQUIRED, ValidationException, Validator)
SCHEMA = (
('server', REQUIRED,
(
('name', REQUIRED, ANY),
('url', REQUIRED, 'http://.+'),
('port', REQUIRED, NUMBER),
)),
('limits', OPTIONAL,
(
('threads', OPTIONAL, NUMBER),
('posix', OPTIONAL, BOOL),
('mode', REQUIRED, '(AUTO$|MANUAL$)'),
)),
)
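# Shape of the schema (as exercised by the cases below): each entry is
# (section_name, REQUIRED|OPTIONAL, properties), and each property is
# (name, REQUIRED|OPTIONAL, pattern) where pattern is a regex the value
# must match; ANY, NUMBER and BOOL are predefined patterns.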
VALID = """
[server]
url=http://foo.com
port=10
name=elvis
[limits]
threads=10
posix=true
mode=AUTO
"""
EXTRA_SECTIONS_AND_PROPERTIES = """
%s
cpu=10
color=blue
[wtf]
name=john
age=10
""" % VALID
OVERRIDE_PROPERTIES = """
[server]
url=http://bar.com
"""
MISSING_REQUIRED_SECTION = """
[limits]
threads=10
posix=true
mode=AUTO
"""
MISSING_OPTIONAL_SECTION = """
[server]
url=http://foo.com
port=10
name=elvis
"""
MISSING_REQUIRED_PROPERTY = """
[server]
url=http://foo.com
name=elvis
[limits]
threads=10
posix=true
mode=AUTO
"""
MISSING_OPTIONAL_PROPERTY = """
[server]
url=http://foo.com
port=10
name=elvis
[limits]
mode=AUTO
"""
TEST_MISSING_REQUIRED_VALUE = """
[server]
url=
port=10
name=elvis
[limits]
threads=10
posix=true
mode=AUTO
"""
TEST_MISSING_OPTIONAL_VALUE = """
[server]
url=http://foo.com
port=10
name=
[limits]
threads=10
posix=true
mode=AUTO
"""
TEST_INVALID_VALUE = """
[server]
url=http://foo.com
port=hello
name=elvis
[limits]
threads=10
posix=true
mode=AUTO
"""
RANDOM_1 = """
[abc]
name=joe
age=10
phone=555-1212
[abcdef]
foo=ABC
bar=DEF
[my_a]
color=blue
height=88
weight=7
[my_b]
width=99
length=44
wood=oak
"""
class TestConfigValidator(unittest.TestCase):
def test_valid(self):
validator = Validator(SCHEMA)
for s in (VALID,
MISSING_OPTIONAL_SECTION,
MISSING_OPTIONAL_PROPERTY,
TEST_MISSING_OPTIONAL_VALUE,):
cfg = self.read(s)
validator.validate(cfg)
def test_invalid(self):
validator = Validator(SCHEMA)
for s in (MISSING_REQUIRED_SECTION,
MISSING_REQUIRED_PROPERTY,
TEST_MISSING_REQUIRED_VALUE,
TEST_INVALID_VALUE,):
cfg = self.read(s)
self.assertRaises(ValidationException, validator.validate, cfg)
def test_extras(self):
cfg = self.read(EXTRA_SECTIONS_AND_PROPERTIES)
s, p = cfg.validate(SCHEMA)
self.assertEqual(len(s), 1)
self.assertEqual(s, ['wtf'])
self.assertEqual(sorted(p), sorted(['limits.cpu', 'limits.color']))
def test_util(self):
cfg = self.read(VALID).graph(True)
        # parse_bool() coerces the config string value to a real bool
v = parse_bool(cfg.limits.posix)
self.assertTrue(isinstance(v, bool))
def test_section_filtering(self):
# load using Config.read()
self.__test_section_filtering(self.read)
# load using Config.update()
def fn(s, filter):
fp = StringIO(s)
d = dict(Config(fp))
return Config(d, filter=filter)
self.__test_section_filtering(fn)
def __test_section_filtering(self, read):
# (abc) only
cfg = read(RANDOM_1, 'abc$')
self.assertEquals(len(cfg), 1)
self.assertTrue('abc' in cfg)
# (abc*) only
cfg = read(RANDOM_1, 'abc')
self.assertEquals(len(cfg), 2)
self.assertTrue('abc' in cfg)
self.assertTrue('abcdef' in cfg)
# (my_a|my_b) only
cfg = read(RANDOM_1, 'my_a|my_b')
self.assertEquals(len(cfg), 2)
self.assertTrue('my_a' in cfg)
self.assertTrue('my_b' in cfg)
# list filter
cfg = read(RANDOM_1, ['abcdef'])
self.assertEquals(len(cfg), 1)
self.assertTrue('abcdef' in cfg)
# tuple filter
cfg = read(RANDOM_1, ('abcdef', 'my_b'))
self.assertEquals(len(cfg), 2)
self.assertTrue('abcdef' in cfg)
self.assertTrue('my_b' in cfg)
# callable filter
def fn(s):
return s in ('my_a', 'my_b')
cfg = read(RANDOM_1, fn)
self.assertEquals(len(cfg), 2)
self.assertTrue('my_a' in cfg)
self.assertTrue('my_b' in cfg)
# (my_a|my_b) only with regex
cfg = read(RANDOM_1, 'my_')
self.assertEquals(len(cfg), 2)
self.assertTrue('my_a' in cfg)
self.assertTrue('my_b' in cfg)
# (my_a|my_b) only with regex pattern passed as callable
pattern = re.compile('my_')
cfg = read(RANDOM_1, pattern.match)
self.assertEquals(len(cfg), 2)
self.assertTrue('my_a' in cfg)
self.assertTrue('my_b' in cfg)
def test_graph(self):
cfg = self.read(VALID).graph()
v = cfg.server.port
self.assertEquals(v, '10')
v = cfg.xxx.port
self.assertEquals(v, None)
v = cfg.server.xxx
self.assertEquals(v, None)
v = cfg.xxx
self.assertEquals(v, {})
def test_override(self):
# Setup
valid_fp = StringIO(VALID)
override_fp = StringIO(OVERRIDE_PROPERTIES)
config = Config(valid_fp, override_fp)
# Test
value = config['server']['url']
# Verify
self.assertEqual(value, 'http://bar.com')
def test_has_option(self):
# Setup
config = self.read(VALID)
# Test
self.assertTrue(config.has_option('server', 'url'))
self.assertTrue(not config.has_option('server', 'foo'))
self.assertTrue(not config.has_option('bar', 'foo'))
def read(self, s, filter=None):
fp = StringIO(s)
cfg = Config(fp, filter=filter)
return cfg
class TestReadJsonConfig(unittest.TestCase):
"""
Class to package up all the tests for the generic code to read
json configurations from a file
"""
@patch('os.path.exists', autospec=True)
@patch('__builtin__.open', autospec=True)
def test_read_json_config(self, mock_open, exists):
exists.return_value = True
mock_open.return_value.read.return_value = '{"foo":"bar"}'
config = read_json_config("server/foo")
mock_open.assert_called_once_with('/etc/pulp/server/foo', 'r')
self.assertEqual(config, {'foo': 'bar'})
@patch('os.path.exists', autospec=True)
@patch('__builtin__.open', autospec=True)
def test_read_json_config_prepended_slash_in_path(self, mock_open, exists):
exists.return_value = True
mock_open.return_value.read.return_value = '{"foo":"bar"}'
read_json_config("/server/foo")
mock_open.assert_called_once_with('/etc/pulp/server/foo', 'r')
def test_read_json_config_non_existent_file(self):
config = read_json_config("bad/file/name")
self.assertEqual(config, {})
|
zuck/prometeo-erp | refs/heads/master | core/views/reports.py | 3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <[email protected]>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
import os
from cStringIO import StringIO
from xhtml2pdf import pisa
from django.template import Context, RequestContext
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.conf import settings
from django.contrib import messages
from django.utils.translation import ugettext as _
from django.views.generic.simple import redirect_to
from prometeo.core.utils import *
def fetch_resources(uri, rel):
"""Callback to allow pisa/reportlab to retrieve images, stylesheets, etc.
"""
return os.path.join(settings.STATIC_ROOT, uri.replace(settings.STATIC_URL, ""))
def render_to_pdf(request, template_name, context, filename="report.pdf", encoding='utf-8', **kwargs):
"""Renders a pdf response using given *request*, *template_name* and *context*.
"""
if not isinstance(context, Context):
context = RequestContext(request, context)
content = render_to_string(template_name, context)
src = StringIO(content.encode(encoding))
out = StringIO()
result = pisa.CreatePDF(src, out, link_callback=fetch_resources, encoding="UTF-8")
if not result.err:
response = HttpResponse(out.getvalue(), mimetype='application/pdf')
if filename is not None:
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
messages.error(request, _("An error has occurred during the PDF conversion"))
return redirect_to(request, url=clean_referer(request))
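# A minimal usage sketch (the view, template name and context below are
# hypothetical, not part of prometeo): a view can delegate to render_to_pdf()
# to return the rendered template as a downloadable PDF attachment.
def invoice_pdf(request):
    context = {'title': 'Invoice #42'}
    return render_to_pdf(request, 'invoice.html', context, filename='invoice.pdf')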
|
teeple/pns_server | refs/heads/master | work/install/node-v0.10.25/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py | 2736 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import gyp.common
import gyp.easy_xml as easy_xml
class Writer(object):
"""Visual Studio XML tool file writer."""
def __init__(self, tool_file_path, name):
"""Initializes the tool file.
Args:
tool_file_path: Path to the tool file.
name: Name of the tool file.
"""
self.tool_file_path = tool_file_path
self.name = name
self.rules_section = ['Rules']
def AddCustomBuildRule(self, name, cmd, description,
additional_dependencies,
outputs, extensions):
"""Adds a rule to the tool file.
Args:
      name: Name of the rule.
      cmd: Command line of the rule.
      description: Description of the rule.
additional_dependencies: other files which may trigger the rule.
outputs: outputs of the rule.
extensions: extensions handled by the rule.
"""
rule = ['CustomBuildRule',
{'Name': name,
'ExecutionDescription': description,
'CommandLine': cmd,
'Outputs': ';'.join(outputs),
'FileExtensions': ';'.join(extensions),
'AdditionalDependencies':
';'.join(additional_dependencies)
}]
self.rules_section.append(rule)
def WriteIfChanged(self):
"""Writes the tool file."""
content = ['VisualStudioToolFile',
{'Version': '8.00',
'Name': self.name
},
self.rules_section
]
easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
encoding="Windows-1252")
|
cloudnull/ansible-modules-core | refs/heads/devel | cloud/google/gce_lb.py | 9 | #!/usr/bin/python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gce_lb
version_added: "1.5"
short_description: create/destroy GCE load-balancer resources
description:
- This module can create and destroy Google Compute Engine C(loadbalancer)
and C(httphealthcheck) resources. The primary LB resource is the
C(load_balancer) resource and the health check parameters are all
prefixed with I(httphealthcheck).
The full documentation for Google Compute Engine load balancing is at
U(https://developers.google.com/compute/docs/load-balancing/). However,
the ansible module simplifies the configuration by following the
libcloud model.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
httphealthcheck_name:
description:
- the name identifier for the HTTP health check
required: false
default: null
httphealthcheck_port:
description:
- the TCP port to use for HTTP health checking
required: false
default: 80
httphealthcheck_path:
description:
- the url path to use for HTTP health checking
required: false
default: "/"
httphealthcheck_interval:
description:
- the duration in seconds between each health check request
required: false
default: 5
httphealthcheck_timeout:
description:
- the timeout in seconds before a request is considered a failed check
required: false
default: 5
httphealthcheck_unhealthy_count:
description:
- number of consecutive failed checks before marking a node unhealthy
required: false
default: 2
httphealthcheck_healthy_count:
description:
- number of consecutive successful checks before marking a node healthy
required: false
default: 2
httphealthcheck_host:
description:
- host header to pass through on HTTP check requests
required: false
default: null
name:
description:
- name of the load-balancer resource
required: false
default: null
protocol:
description:
- the protocol used for the load-balancer packet forwarding, tcp or udp
required: false
default: "tcp"
choices: ['tcp', 'udp']
region:
description:
- the GCE region where the load-balancer is defined
required: false
external_ip:
description:
- the external static IPv4 (or auto-assigned) address for the LB
required: false
default: null
port_range:
description:
      - the port (range) to forward, e.g. 80 or 8000-8888; defaults to all ports
required: false
default: null
members:
description:
- a list of zone/nodename pairs, e.g ['us-central1-a/www-a', ...]
required: false
aliases: ['nodes']
state:
description:
- desired state of the LB
default: "present"
choices: ["active", "present", "absent", "deleted"]
aliases: []
service_account_email:
version_added: "1.6"
description:
- service account email
required: false
default: null
aliases: []
pem_file:
version_added: "1.6"
description:
- path to the pem file associated with the service account email
required: false
default: null
aliases: []
project_id:
version_added: "1.6"
description:
- your GCE project ID
required: false
default: null
aliases: []
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3"
author: Eric Johnson <[email protected]>
'''
EXAMPLES = '''
# Simple example of creating a new LB, adding members, and a health check
- local_action:
module: gce_lb
name: testlb
region: us-central1
members: ["us-central1-a/www-a", "us-central1-b/www-b"]
httphealthcheck_name: hc
httphealthcheck_port: 80
httphealthcheck_path: "/up"
'''
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.loadbalancer.types import Provider as Provider_lb
from libcloud.loadbalancer.providers import get_driver as get_driver_lb
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceNotFoundError
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
def main():
module = AnsibleModule(
argument_spec = dict(
httphealthcheck_name = dict(),
httphealthcheck_port = dict(default=80),
httphealthcheck_path = dict(default='/'),
httphealthcheck_interval = dict(default=5),
httphealthcheck_timeout = dict(default=5),
httphealthcheck_unhealthy_count = dict(default=2),
httphealthcheck_healthy_count = dict(default=2),
httphealthcheck_host = dict(),
name = dict(),
protocol = dict(default='tcp'),
region = dict(),
external_ip = dict(),
port_range = dict(),
members = dict(type='list'),
state = dict(default='present'),
service_account_email = dict(),
pem_file = dict(),
project_id = dict(),
)
)
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.13.3+) required for this module.')
gce = gce_connect(module)
httphealthcheck_name = module.params.get('httphealthcheck_name')
httphealthcheck_port = module.params.get('httphealthcheck_port')
httphealthcheck_path = module.params.get('httphealthcheck_path')
httphealthcheck_interval = module.params.get('httphealthcheck_interval')
httphealthcheck_timeout = module.params.get('httphealthcheck_timeout')
httphealthcheck_unhealthy_count = \
module.params.get('httphealthcheck_unhealthy_count')
httphealthcheck_healthy_count = \
module.params.get('httphealthcheck_healthy_count')
httphealthcheck_host = module.params.get('httphealthcheck_host')
name = module.params.get('name')
protocol = module.params.get('protocol')
region = module.params.get('region')
external_ip = module.params.get('external_ip')
port_range = module.params.get('port_range')
members = module.params.get('members')
state = module.params.get('state')
try:
gcelb = get_driver_lb(Provider_lb.GCE)(gce_driver=gce)
gcelb.connection.user_agent_append("%s/%s" % (
USER_AGENT_PRODUCT, USER_AGENT_VERSION))
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
changed = False
json_output = {'name': name, 'state': state}
if not name and not httphealthcheck_name:
module.fail_json(msg='Nothing to do, please specify a "name" ' + \
'or "httphealthcheck_name" parameter', changed=False)
if state in ['active', 'present']:
# first, create the httphealthcheck if requested
hc = None
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gcelb.ex_create_healthcheck(httphealthcheck_name,
host=httphealthcheck_host, path=httphealthcheck_path,
port=httphealthcheck_port,
interval=httphealthcheck_interval,
timeout=httphealthcheck_timeout,
unhealthy_threshold=httphealthcheck_unhealthy_count,
healthy_threshold=httphealthcheck_healthy_count)
changed = True
except ResourceExistsError:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if hc is not None:
json_output['httphealthcheck_host'] = hc.extra['host']
json_output['httphealthcheck_path'] = hc.path
json_output['httphealthcheck_port'] = hc.port
json_output['httphealthcheck_interval'] = hc.interval
json_output['httphealthcheck_timeout'] = hc.timeout
json_output['httphealthcheck_unhealthy_count'] = \
hc.unhealthy_threshold
json_output['httphealthcheck_healthy_count'] = \
hc.healthy_threshold
# create the forwarding rule (and target pool under the hood)
lb = None
if name:
if not region:
module.fail_json(msg='Missing required region name',
changed=False)
nodes = []
output_nodes = []
json_output['name'] = name
# members is a python list of 'zone/inst' strings
if members:
for node in members:
try:
zone, node_name = node.split('/')
nodes.append(gce.ex_get_node(node_name, zone))
output_nodes.append(node)
except:
# skip nodes that are badly formatted or don't exist
pass
try:
if hc is not None:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_healthchecks=[hc],
ex_address=external_ip)
else:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_address=external_ip)
changed = True
except ResourceExistsError:
lb = gcelb.get_balancer(name)
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if lb is not None:
json_output['members'] = output_nodes
json_output['protocol'] = protocol
json_output['region'] = region
json_output['external_ip'] = lb.ip
json_output['port_range'] = lb.port
hc_names = []
if 'healthchecks' in lb.extra:
for hc in lb.extra['healthchecks']:
hc_names.append(hc.name)
json_output['httphealthchecks'] = hc_names
if state in ['absent', 'deleted']:
# first, delete the load balancer (forwarding rule and target pool)
# if specified.
if name:
json_output['name'] = name
try:
lb = gcelb.get_balancer(name)
gcelb.destroy_balancer(lb)
changed = True
except ResourceNotFoundError:
pass
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
# destroy the health check if specified
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
gce.ex_destroy_healthcheck(hc)
changed = True
except ResourceNotFoundError:
pass
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
json_output['changed'] = changed
module.exit_json(**json_output)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
if __name__ == '__main__':
main()
|
sharkerz/deluge-yarss-plugin | refs/heads/master | yarss2/lib/requests/api.py | 55 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
from . import sessions
from .safe_mode import catch_exceptions_if_in_safe_mode
@catch_exceptions_if_in_safe_mode
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the request.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param return_response: (optional) If False, an un-sent Request object will be returned.
:param session: (optional) A :class:`Session` object to be used for the request.
:param config: (optional) A configuration dictionary. See ``request.defaults`` for allowed keys and their default values.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
"""
# if this session was passed in, leave it open (and retain pooled connections);
# if we're making it just for this call, then close it when we're done.
adhoc_session = False
session = kwargs.pop('session', None)
if session is None:
session = sessions.session()
adhoc_session = True
try:
return session.request(method=method, url=url, **kwargs)
finally:
if adhoc_session:
session.close()
def get(url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)
def options(url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('post', url, data=data, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return request('delete', url, **kwargs)
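if __name__ == '__main__':
    # A minimal usage sketch (the URL is a placeholder, not a real endpoint):
    # every helper above funnels into request(), so a GET with query string
    # parameters reduces to a single call.
    r = get('http://example.com/search', params={'q': 'spam'})
    print r.status_code   # e.g. 200
    print r.text          # decoded response body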
|
StealthMicro/OctoPi-Makerbot | refs/heads/master | env/Lib/site-packages/netaddr/ip/sets.py | 9 | #-----------------------------------------------------------------------------
# Copyright (c) 2008-2012, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
"""Set based operations for IP addresses and subnets."""
import sys as _sys
import itertools as _itertools
from netaddr.strategy import ipv4 as _ipv4, ipv6 as _ipv6
from netaddr.ip.intset import IntSet as _IntSet
from netaddr.ip import IPNetwork, IPAddress, cidr_merge, cidr_exclude, \
iprange_to_cidrs
from netaddr.compat import _zip, _sys_maxint, _dict_keys, _int_type
#-----------------------------------------------------------------------------
def partition_ips(iterable):
"""
    Takes a sequence of IP addresses and networks, splitting them into two
    separate sequences by IP version.
    :param iterable: a sequence or iterator containing IP addresses and networks.
:return: a two element tuple (ipv4_list, ipv6_list).
"""
    # Fail fast if the argument isn't actually iterable.
if not hasattr(iterable, '__iter__'):
raise ValueError('A sequence or iterator is expected!')
ipv4 = []
ipv6 = []
for ip in iterable:
if not hasattr(ip, 'version'):
raise TypeError('IPAddress or IPNetwork expected!')
if ip.version == 4:
ipv4.append(ip)
else:
ipv6.append(ip)
return ipv4, ipv6
#-----------------------------------------------------------------------------
class IPSet(object):
"""
Represents an unordered collection (set) of unique IP addresses and
subnets.
"""
__slots__ = ('_cidrs',)
def __init__(self, iterable=None, flags=0):
"""
Constructor.
:param iterable: (optional) an iterable containing IP addresses and
subnets.
:param flags: decides which rules are applied to the interpretation
of the addr value. See the netaddr.core namespace documentation
for supported constant values.
"""
self._cidrs = {}
if iterable is not None:
mergeable = []
for addr in iterable:
if isinstance(addr, _int_type):
addr = IPAddress(addr, flags=flags)
mergeable.append(addr)
for cidr in cidr_merge(mergeable):
self._cidrs[cidr] = True
def __getstate__(self):
""":return: Pickled state of an ``IPSet`` object."""
return tuple([cidr.__getstate__() for cidr in self._cidrs])
def __setstate__(self, state):
"""
:param state: data used to unpickle a pickled ``IPSet`` object.
"""
#TODO: this needs to be optimised.
self._cidrs = {}
for cidr_tuple in state:
value, prefixlen, version = cidr_tuple
if version == 4:
module = _ipv4
elif version == 6:
module = _ipv6
else:
raise ValueError('unpickling failed for object state %s' \
% str(state))
if 0 <= prefixlen <= module.width:
cidr = IPNetwork((value, prefixlen), version=module.version)
self._cidrs[cidr] = True
else:
raise ValueError('unpickling failed for object state %s' \
% str(state))
def compact(self):
"""
Compact internal list of `IPNetwork` objects using a CIDR merge.
"""
cidrs = cidr_merge(list(self._cidrs))
self._cidrs = dict(_zip(cidrs, [True] * len(cidrs)))
def __hash__(self):
"""
Raises ``TypeError`` if this method is called.
.. note:: IPSet objects are not hashable and cannot be used as \
dictionary keys or as members of other sets. \
"""
raise TypeError('IP sets are unhashable!')
def __contains__(self, ip):
"""
:param ip: An IP address or subnet.
:return: ``True`` if IP address or subnet is a member of this IP set.
"""
ip = IPNetwork(ip)
for cidr in self._cidrs:
if ip in cidr:
return True
return False
def __iter__(self):
"""
:return: an iterator over the IP addresses within this IP set.
"""
return _itertools.chain(*sorted(self._cidrs))
def iter_cidrs(self):
"""
:return: an iterator over individual IP subnets within this IP set.
"""
return sorted(self._cidrs)
def add(self, addr, flags=0):
"""
Adds an IP address or subnet to this IP set. Has no effect if it is
already present.
Note that where possible the IP address or subnet is merged with other
members of the set to form more concise CIDR blocks.
:param addr: An IP address or subnet.
:param flags: decides which rules are applied to the interpretation
of the addr value. See the netaddr.core namespace documentation
for supported constant values.
"""
if isinstance(addr, _int_type):
addr = IPAddress(addr, flags=flags)
else:
addr = IPNetwork(addr)
self._cidrs[addr] = True
self.compact()
def remove(self, addr, flags=0):
"""
Removes an IP address or subnet from this IP set. Does nothing if it
is not already a member.
Note that this method behaves more like discard() found in regular
Python sets because it doesn't raise KeyError exceptions if the
        IP address or subnet in question does not exist. It doesn't make sense
to fully emulate that behaviour here as IP sets contain groups of
individual IP addresses as individual set members using IPNetwork
objects.
:param addr: An IP address or subnet.
:param flags: decides which rules are applied to the interpretation
of the addr value. See the netaddr.core namespace documentation
for supported constant values.
"""
if isinstance(addr, _int_type):
addr = IPAddress(addr, flags=flags)
else:
addr = IPNetwork(addr)
# This add() is required for address blocks provided that are larger
# than blocks found within the set but have overlaps. e.g. :-
#
# >>> IPSet(['192.0.2.0/24']).remove('192.0.2.0/23')
# IPSet([])
#
self.add(addr)
remainder = None
matching_cidr = None
# Search for a matching CIDR and exclude IP from it.
for cidr in self._cidrs:
if addr in cidr:
remainder = cidr_exclude(cidr, addr)
matching_cidr = cidr
break
# Replace matching CIDR with remaining CIDR elements.
if remainder is not None:
del self._cidrs[matching_cidr]
for cidr in remainder:
self._cidrs[cidr] = True
self.compact()
def pop(self):
"""
Removes and returns an arbitrary IP address or subnet from this IP
set.
:return: An IP address or subnet.
"""
return self._cidrs.popitem()[0]
def isdisjoint(self, other):
"""
:param other: an IP set.
:return: ``True`` if this IP set has no elements (IP addresses
or subnets) in common with other. Intersection *must* be an
empty set.
"""
result = self.intersection(other)
if result == IPSet():
return True
return False
def copy(self):
""":return: a shallow copy of this IP set."""
obj_copy = self.__class__()
obj_copy._cidrs.update(self._cidrs)
return obj_copy
def update(self, iterable, flags=0):
"""
Update the contents of this IP set with the union of itself and
other IP set.
:param iterable: an iterable containing IP addresses and subnets.
:param flags: decides which rules are applied to the interpretation
of the addr value. See the netaddr.core namespace documentation
for supported constant values.
"""
if not hasattr(iterable, '__iter__'):
raise TypeError('an iterable was expected!')
if hasattr(iterable, '_cidrs'):
# Another IP set.
for ip in cidr_merge(_dict_keys(self._cidrs)
+ _dict_keys(iterable._cidrs)):
self._cidrs[ip] = True
else:
            # An iterable containing IP addresses or subnets.
mergeable = []
for addr in iterable:
if isinstance(addr, _int_type):
addr = IPAddress(addr, flags=flags)
mergeable.append(addr)
for cidr in cidr_merge(_dict_keys(self._cidrs) + mergeable):
self._cidrs[cidr] = True
self.compact()
def clear(self):
"""Remove all IP addresses and subnets from this IP set."""
self._cidrs = {}
def __eq__(self, other):
"""
:param other: an IP set
:return: ``True`` if this IP set is equivalent to the ``other`` IP set,
``False`` otherwise.
"""
try:
return self._cidrs == other._cidrs
except AttributeError:
return NotImplemented
def __ne__(self, other):
"""
:param other: an IP set
:return: ``False`` if this IP set is equivalent to the ``other`` IP set,
``True`` otherwise.
"""
try:
return self._cidrs != other._cidrs
except AttributeError:
return NotImplemented
def __lt__(self, other):
"""
:param other: an IP set
:return: ``True`` if this IP set is less than the ``other`` IP set,
``False`` otherwise.
"""
if not hasattr(other, '_cidrs'):
return NotImplemented
return len(self) < len(other) and self.issubset(other)
def issubset(self, other):
"""
:param other: an IP set.
:return: ``True`` if every IP address and subnet in this IP set
is found within ``other``.
"""
if not hasattr(other, '_cidrs'):
return NotImplemented
l_ipv4, l_ipv6 = partition_ips(self._cidrs)
r_ipv4, r_ipv6 = partition_ips(other._cidrs)
l_ipv4_iset = _IntSet(*[(c.first, c.last) for c in l_ipv4])
r_ipv4_iset = _IntSet(*[(c.first, c.last) for c in r_ipv4])
l_ipv6_iset = _IntSet(*[(c.first, c.last) for c in l_ipv6])
r_ipv6_iset = _IntSet(*[(c.first, c.last) for c in r_ipv6])
ipv4 = l_ipv4_iset.issubset(r_ipv4_iset)
ipv6 = l_ipv6_iset.issubset(r_ipv6_iset)
return ipv4 and ipv6
__le__ = issubset
def __gt__(self, other):
"""
:param other: an IP set.
:return: ``True`` if this IP set is greater than the ``other`` IP set,
``False`` otherwise.
"""
if not hasattr(other, '_cidrs'):
return NotImplemented
return len(self) > len(other) and self.issuperset(other)
def issuperset(self, other):
"""
:param other: an IP set.
:return: ``True`` if every IP address and subnet in other IP set
is found within this one.
"""
if not hasattr(other, '_cidrs'):
return NotImplemented
l_ipv4, l_ipv6 = partition_ips(self._cidrs)
r_ipv4, r_ipv6 = partition_ips(other._cidrs)
l_ipv4_iset = _IntSet(*[(c.first, c.last) for c in l_ipv4])
r_ipv4_iset = _IntSet(*[(c.first, c.last) for c in r_ipv4])
l_ipv6_iset = _IntSet(*[(c.first, c.last) for c in l_ipv6])
r_ipv6_iset = _IntSet(*[(c.first, c.last) for c in r_ipv6])
ipv4 = l_ipv4_iset.issuperset(r_ipv4_iset)
ipv6 = l_ipv6_iset.issuperset(r_ipv6_iset)
return ipv4 and ipv6
__ge__ = issuperset
def union(self, other):
"""
:param other: an IP set.
:return: the union of this IP set and another as a new IP set
(combines IP addresses and subnets from both sets).
"""
ip_set = self.copy()
ip_set.update(other)
ip_set.compact()
return ip_set
__or__ = union
def intersection(self, other):
"""
:param other: an IP set.
:return: the intersection of this IP set and another as a new IP set.
(IP addresses and subnets common to both sets).
"""
cidr_list = []
# Separate IPv4 from IPv6.
l_ipv4, l_ipv6 = partition_ips(self._cidrs)
r_ipv4, r_ipv6 = partition_ips(other._cidrs)
# Process IPv4.
l_ipv4_iset = _IntSet(*[(c.first, c.last) for c in l_ipv4])
r_ipv4_iset = _IntSet(*[(c.first, c.last) for c in r_ipv4])
ipv4_result = l_ipv4_iset & r_ipv4_iset
for start, end in list(ipv4_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 4), IPAddress(end-1, 4))
cidr_list.extend(cidrs)
# Process IPv6.
l_ipv6_iset = _IntSet(*[(c.first, c.last) for c in l_ipv6])
r_ipv6_iset = _IntSet(*[(c.first, c.last) for c in r_ipv6])
ipv6_result = l_ipv6_iset & r_ipv6_iset
for start, end in list(ipv6_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 6), IPAddress(end-1, 6))
cidr_list.extend(cidrs)
return IPSet(cidr_list)
__and__ = intersection
def symmetric_difference(self, other):
"""
:param other: an IP set.
:return: the symmetric difference of this IP set and another as a new
IP set (all IP addresses and subnets that are in exactly one
of the sets).
"""
cidr_list = []
# Separate IPv4 from IPv6.
l_ipv4, l_ipv6 = partition_ips(self._cidrs)
r_ipv4, r_ipv6 = partition_ips(other._cidrs)
# Process IPv4.
l_ipv4_iset = _IntSet(*[(c.first, c.last) for c in l_ipv4])
r_ipv4_iset = _IntSet(*[(c.first, c.last) for c in r_ipv4])
ipv4_result = l_ipv4_iset ^ r_ipv4_iset
for start, end in list(ipv4_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 4), IPAddress(end-1, 4))
cidr_list.extend(cidrs)
# Process IPv6.
l_ipv6_iset = _IntSet(*[(c.first, c.last) for c in l_ipv6])
r_ipv6_iset = _IntSet(*[(c.first, c.last) for c in r_ipv6])
ipv6_result = l_ipv6_iset ^ r_ipv6_iset
for start, end in list(ipv6_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 6), IPAddress(end-1, 6))
cidr_list.extend(cidrs)
return IPSet(cidr_list)
__xor__ = symmetric_difference
def difference(self, other):
"""
:param other: an IP set.
:return: the difference between this IP set and another as a new IP
set (all IP addresses and subnets that are in this IP set but
not found in the other.)
"""
cidr_list = []
# Separate IPv4 from IPv6.
l_ipv4, l_ipv6 = partition_ips(self._cidrs)
r_ipv4, r_ipv6 = partition_ips(other._cidrs)
# Process IPv4.
l_ipv4_iset = _IntSet(*[(c.first, c.last) for c in l_ipv4])
r_ipv4_iset = _IntSet(*[(c.first, c.last) for c in r_ipv4])
ipv4_result = l_ipv4_iset - r_ipv4_iset
for start, end in list(ipv4_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 4), IPAddress(end-1, 4))
cidr_list.extend(cidrs)
# Process IPv6.
l_ipv6_iset = _IntSet(*[(c.first, c.last) for c in l_ipv6])
r_ipv6_iset = _IntSet(*[(c.first, c.last) for c in r_ipv6])
ipv6_result = l_ipv6_iset - r_ipv6_iset
for start, end in list(ipv6_result._ranges):
cidrs = iprange_to_cidrs(IPAddress(start, 6), IPAddress(end-1, 6))
cidr_list.extend(cidrs)
return IPSet(cidr_list)
__sub__ = difference
def __len__(self):
"""
:return: the cardinality of this IP set (i.e. sum of individual IP \
addresses). Raises ``IndexError`` if size > maxint (a Python \
limitation). Use the .size property for subnets of any size.
"""
size = self.size
        if size > _sys_maxint:
raise IndexError("range contains greater than %d (maxint) " \
"IP addresses! Use the .size property instead." % _sys_maxint)
return size
@property
def size(self):
"""
The cardinality of this IP set (based on the number of individual IP
addresses including those implicitly defined in subnets).
"""
return sum([cidr.size for cidr in self._cidrs])
def __repr__(self):
""":return: Python statement to create an equivalent object"""
return 'IPSet(%r)' % [str(c) for c in sorted(self._cidrs)]
__str__ = __repr__
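#-----------------------------------------------------------------------------
if __name__ == '__main__':
    # A short usage sketch (the addresses are RFC 5737 documentation ranges):
    # adjacent subnets added to an IPSet are merged into a single CIDR, and
    # set arithmetic operates across the merged blocks.
    s1 = IPSet(['192.0.2.0/25', '192.0.2.128/25'])   # merges to 192.0.2.0/24
    s2 = IPSet(['192.0.2.0/26'])
    assert '192.0.2.1' in s1
    assert s1 - s2 == IPSet(['192.0.2.64/26', '192.0.2.128/25'])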
|
sriksrid/MatlabAMPL | refs/heads/master | NeosClient.py | 1 | #!/usr/bin/env python
import sys
import xmlrpclib
import time
#from config import Variables
if len(sys.argv) < 2 or len(sys.argv) > 3:
sys.stderr.write("Usage: NeosClient <xmlfilename | help | queue>\n")
sys.exit(1)
neos=xmlrpclib.Server("http://%s:%d" % ('www.neos-server.org', 3332))
if sys.argv[1] == "help":
sys.stdout.write("Help not yet available...\n")
elif sys.argv[1] == "queue":
msg = neos.printQueue()
sys.stdout.write(msg)
else:
    xmlfile = open(sys.argv[1], "r")
    xml = xmlfile.read()
    xmlfile.close()
(jobNumber,password) = neos.submitJob(xml)
sys.stdout.write("jobNumber = %d\tpassword = %s\n" % (jobNumber,password))
offset=0
status="Waiting"
while status == "Running" or status=="Waiting":
time.sleep(1)
(msg,offset) = neos.getIntermediateResults(jobNumber,password,offset)
#sys.stdout.write(msg.data)
status = neos.getJobStatus(jobNumber, password)
msg = neos.getFinalResults(jobNumber, password).data
sys.stdout.write(msg)
|
dkodnik/Ant | refs/heads/master | addons/stock/report/stock_inventory_move_report.py | 63 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
class stock_inventory_move(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(stock_inventory_move, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'qty_total':self._qty_total
})
def _qty_total(self, objects):
total = 0.0
uom = objects[0].product_uom.name
for obj in objects:
total += obj.product_qty
return {'quantity':total,'uom':uom}
report_sxw.report_sxw(
'report.stock.inventory.move',
'stock.inventory',
'addons/stock/report/stock_inventory_move.rml',
parser=stock_inventory_move,
header='internal'
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
SEL-Columbia/commcare-hq | refs/heads/master | corehq/apps/hqcase/tests/test_bugs.py | 2 | import uuid
from dimagi.utils.parsing import json_format_datetime
from django.contrib.auth.models import User
from django.test import TestCase
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.models import CommCareCase
from casexml.apps.case.util import post_case_blocks
from casexml.apps.case.xml import V2
from casexml.apps.phone.restore import RestoreConfig
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import CommCareUser, CouchUser
from corehq.apps.users.util import format_username
class OtaRestoreBugTest(TestCase):
def setUp(self):
for user in CouchUser.all():
user.delete()
User.objects.all().delete()
def testCrossDomainAssignments(self):
good_domain = 'main-domain'
create_domain(good_domain)
bad_domain = 'bad-domain'
create_domain(bad_domain)
user = CommCareUser.create(good_domain, format_username('user', good_domain), 'secret')
def _submit_case(domain):
case_id = uuid.uuid4().hex
case_block = CaseBlock(
create=True,
case_id=case_id,
case_name='donald',
case_type='duck',
user_id=user._id,
owner_id=user._id,
version=V2,
).as_xml(format_datetime=json_format_datetime)
post_case_blocks([case_block], {'domain': domain})
return CommCareCase.get(case_id)
good_case = _submit_case(good_domain)
# create a case in the "wrong" domain
# in the future this should actually fail completely
bad_case = _submit_case(bad_domain)
self.assertEqual(good_domain, good_case.domain)
self.assertEqual(bad_domain, bad_case.domain)
for case in (good_case, bad_case):
self.assertEqual(user._id, case.user_id)
self.assertEqual(user._id, case.owner_id)
restore_config = RestoreConfig(
user.to_casexml_user(), version=V2,
)
payload = restore_config.get_payload()
self.assertTrue(good_case._id in payload)
self.assertFalse(bad_case._id in payload)
|
sainathreddy/muzei | refs/heads/master | web/lib/bs4/__init__.py | 417 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.
Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""
__author__ = "Leonard Richardson ([email protected])"
__version__ = "4.3.2"
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import os
import re
import warnings
from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
CData,
Comment,
DEFAULT_OUTPUT_ENCODING,
Declaration,
Doctype,
NavigableString,
PageElement,
ProcessingInstruction,
ResultSet,
SoupStrainer,
Tag,
)
# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
class BeautifulSoup(Tag):
"""
This class defines the basic interface called by the tree builders.
These methods will be called by the parser:
reset()
feed(markup)
The tree builder may call these methods from its feed() implementation:
handle_starttag(name, attrs) # See note about return value
handle_endtag(name)
handle_data(data) # Appends to the current data node
endData(containerClass=NavigableString) # Ends the current data node
No matter how complicated the underlying parser is, you should be
able to build a tree using 'start tag' events, 'end tag' events,
'data' events, and "done with data" events.
If you encounter an empty-element tag (aka a self-closing tag,
like HTML's <br> tag), call handle_starttag and then
handle_endtag.
"""
ROOT_TAG_NAME = u'[document]'
# If the end-user gives no indication which tree builder they
# want, look for one with these features.
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
def __init__(self, markup="", features=None, builder=None,
parse_only=None, from_encoding=None, **kwargs):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser."""
if 'convertEntities' in kwargs:
warnings.warn(
"BS4 does not respect the convertEntities argument to the "
"BeautifulSoup constructor. Entities are always converted "
"to Unicode characters.")
if 'markupMassage' in kwargs:
del kwargs['markupMassage']
warnings.warn(
"BS4 does not respect the markupMassage argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for any necessary markup massage.")
if 'smartQuotesTo' in kwargs:
del kwargs['smartQuotesTo']
warnings.warn(
"BS4 does not respect the smartQuotesTo argument to the "
"BeautifulSoup constructor. Smart quotes are always converted "
"to Unicode characters.")
if 'selfClosingTags' in kwargs:
del kwargs['selfClosingTags']
warnings.warn(
"BS4 does not respect the selfClosingTags argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for understanding self-closing tags.")
if 'isHTML' in kwargs:
del kwargs['isHTML']
warnings.warn(
"BS4 does not respect the isHTML argument to the "
"BeautifulSoup constructor. You can pass in features='html' "
"or features='xml' to get a builder capable of handling "
"one or the other.")
def deprecated_argument(old_name, new_name):
if old_name in kwargs:
warnings.warn(
'The "%s" argument to the BeautifulSoup constructor '
'has been renamed to "%s."' % (old_name, new_name))
value = kwargs[old_name]
del kwargs[old_name]
return value
return None
parse_only = parse_only or deprecated_argument(
"parseOnlyThese", "parse_only")
from_encoding = from_encoding or deprecated_argument(
"fromEncoding", "from_encoding")
if len(kwargs) > 0:
arg = kwargs.keys().pop()
raise TypeError(
"__init__() got an unexpected keyword argument '%s'" % arg)
if builder is None:
if isinstance(features, basestring):
features = [features]
if features is None or len(features) == 0:
features = self.DEFAULT_BUILDER_FEATURES
builder_class = builder_registry.lookup(*features)
if builder_class is None:
raise FeatureNotFound(
"Couldn't find a tree builder with the features you "
"requested: %s. Do you need to install a parser library?"
% ",".join(features))
builder = builder_class()
self.builder = builder
self.is_xml = builder.is_xml
self.builder.soup = self
self.parse_only = parse_only
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
elif len(markup) <= 256:
# Print out warnings for a couple beginner problems
# involving passing non-markup to Beautiful Soup.
# Beautiful Soup will still parse the input as markup,
# just in case that's what the user really wants.
if (isinstance(markup, unicode)
and not os.path.supports_unicode_filenames):
possible_filename = markup.encode("utf8")
else:
possible_filename = markup
is_file = False
try:
is_file = os.path.exists(possible_filename)
except Exception, e:
# This is almost certainly a problem involving
# characters not valid in filenames on this
# system. Just let it go.
pass
if is_file:
warnings.warn(
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
if markup[:5] == "http:" or markup[:6] == "https:":
# TODO: This is ugly but I couldn't get it to work in
# Python 3 otherwise.
if ((isinstance(markup, bytes) and not b' ' in markup)
or (isinstance(markup, unicode) and not u' ' in markup)):
warnings.warn(
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
for (self.markup, self.original_encoding, self.declared_html_encoding,
self.contains_replacement_characters) in (
self.builder.prepare_markup(markup, from_encoding)):
self.reset()
try:
self._feed()
break
except ParserRejectedMarkup:
pass
# Clear out the markup and remove the builder's circular
# reference to this object.
self.markup = None
self.builder.soup = None
def _feed(self):
# Convert the document to Unicode.
self.builder.reset()
self.builder.feed(self.markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def reset(self):
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
self.hidden = 1
self.builder.reset()
self.current_data = []
self.currentTag = None
self.tagStack = []
self.preserve_whitespace_tag_stack = []
self.pushTag(self)
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
"""Create a new tag associated with this soup."""
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
def new_string(self, s, subclass=NavigableString):
"""Create a new NavigableString associated with this soup."""
navigable = subclass(s)
navigable.setup()
return navigable
def insert_before(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
def insert_after(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
def popTag(self):
tag = self.tagStack.pop()
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
self.preserve_whitespace_tag_stack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
if tag.name in self.builder.preserve_whitespace_tags:
self.preserve_whitespace_tag_stack.append(tag)
def endData(self, containerClass=NavigableString):
if self.current_data:
current_data = u''.join(self.current_data)
# If whitespace is not preserved, and this string contains
# nothing but ASCII spaces, replace it with a single space
# or newline.
if not self.preserve_whitespace_tag_stack:
strippable = True
for i in current_data:
if i not in self.ASCII_SPACES:
strippable = False
break
if strippable:
if '\n' in current_data:
current_data = '\n'
else:
current_data = ' '
# Reset the data collector.
self.current_data = []
# Should we add this string to the tree at all?
if self.parse_only and len(self.tagStack) <= 1 and \
(not self.parse_only.text or \
not self.parse_only.search(current_data)):
return
o = containerClass(current_data)
self.object_was_parsed(o)
def object_was_parsed(self, o, parent=None, most_recent_element=None):
"""Add an object to the parse tree."""
parent = parent or self.currentTag
most_recent_element = most_recent_element or self._most_recent_element
o.setup(parent, most_recent_element)
if most_recent_element is not None:
most_recent_element.next_element = o
self._most_recent_element = o
parent.contents.append(o)
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
# The BeautifulSoup object itself can never be popped.
return
most_recently_popped = None
stack_size = len(self.tagStack)
for i in range(stack_size - 1, 0, -1):
t = self.tagStack[i]
if (name == t.name and nsprefix == t.prefix):
if inclusivePop:
most_recently_popped = self.popTag()
break
most_recently_popped = self.popTag()
return most_recently_popped
def handle_starttag(self, name, namespace, nsprefix, attrs):
"""Push a start tag on to the stack.
If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occurred
in the document. For instance, if this was a self-closing tag,
don't call handle_endtag.
"""
# print "Start tag %s: %s" % (name, attrs)
self.endData()
if (self.parse_only and len(self.tagStack) <= 1
and (self.parse_only.text
or not self.parse_only.search_tag(name, attrs))):
return None
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
self.currentTag, self._most_recent_element)
if tag is None:
return tag
if self._most_recent_element:
self._most_recent_element.next_element = tag
self._most_recent_element = tag
self.pushTag(tag)
return tag
def handle_endtag(self, name, nsprefix=None):
#print "End tag: " + name
self.endData()
self._popToTag(name, nsprefix)
def handle_data(self, data):
self.current_data.append(data)
def decode(self, pretty_print=False,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Returns a string or Unicode representation of this document.
To get Unicode, pass None for encoding."""
if self.is_xml:
# Print the XML declaration
encoding_part = ''
if eventual_encoding != None:
encoding_part = ' encoding="%s"' % eventual_encoding
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
else:
prefix = u''
if not pretty_print:
indent_level = None
else:
indent_level = 0
return prefix + super(BeautifulSoup, self).decode(
indent_level, eventual_encoding, formatter)
# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup
class BeautifulStoneSoup(BeautifulSoup):
"""Deprecated interface to an XML parser."""
def __init__(self, *args, **kwargs):
kwargs['features'] = 'xml'
warnings.warn(
'The BeautifulStoneSoup class is deprecated. Instead of using '
'it, pass features="xml" into the BeautifulSoup constructor.')
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
class StopParsing(Exception):
pass
class FeatureNotFound(ValueError):
pass
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
print soup.prettify()
|
bowang/tensorflow | refs/heads/master | tensorflow/python/kernel_tests/sparse_cross_op_test.py | 72 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for sparse_cross_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
class SparseCrossOpTest(test.TestCase):
def test_simple(self):
"""Tests a simple scenario."""
op = sparse_ops._sparse_cross([
self._sparse_tensor([['batch1-FC1-F1'],
['batch2-FC1-F1', 'batch2-FC1-F2']]),
self._sparse_tensor([['batch1-FC2-F1'],
['batch2-FC2-F1', 'batch2-FC2-F2']])
])
expected_out = self._sparse_tensor([['batch1-FC1-F1_X_batch1-FC2-F1'], [
'batch2-FC1-F1_X_batch2-FC2-F1', 'batch2-FC1-F1_X_batch2-FC2-F2',
'batch2-FC1-F2_X_batch2-FC2-F1', 'batch2-FC1-F2_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_dense(self):
"""Tests only dense inputs."""
op = sparse_ops._sparse_cross([
constant_op.constant([['batch1-FC1-F1', 'batch1-FC1-F2'],
['batch2-FC1-F1', 'batch2-FC1-F2']],
dtypes.string),
constant_op.constant([['batch1-FC2-F1', 'batch1-FC2-F2'],
['batch2-FC2-F1', 'batch2-FC2-F2']],
dtypes.string),
])
expected_out = self._sparse_tensor([[
'batch1-FC1-F1_X_batch1-FC2-F1', 'batch1-FC1-F1_X_batch1-FC2-F2',
'batch1-FC1-F2_X_batch1-FC2-F1', 'batch1-FC1-F2_X_batch1-FC2-F2'
], [
'batch2-FC1-F1_X_batch2-FC2-F1', 'batch2-FC1-F1_X_batch2-FC2-F2',
'batch2-FC1-F2_X_batch2-FC2-F1', 'batch2-FC1-F2_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_integer_mixed_string_sparse(self):
"""Tests mixed type."""
op = sparse_ops._sparse_cross([
self._sparse_tensor([[11], [333, 55555]]),
self._sparse_tensor([['batch1-FC2-F1'],
['batch2-FC2-F1', 'batch2-FC2-F2']])
])
expected_out = self._sparse_tensor([['11_X_batch1-FC2-F1'], [
'333_X_batch2-FC2-F1', '333_X_batch2-FC2-F2', '55555_X_batch2-FC2-F1',
'55555_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_integer_mixed_string_dense(self):
"""Tests mixed dense inputs."""
op = sparse_ops._sparse_cross([
constant_op.constant([[11, 333], [55555, 999999]], dtypes.int64),
constant_op.constant([['batch1-FC2-F1', 'batch1-FC2-F2'],
['batch2-FC2-F1', 'batch2-FC2-F2']],
dtypes.string),
])
expected_out = self._sparse_tensor([[
'11_X_batch1-FC2-F1', '11_X_batch1-FC2-F2', '333_X_batch1-FC2-F1',
'333_X_batch1-FC2-F2'
], [
'55555_X_batch2-FC2-F1', '55555_X_batch2-FC2-F2',
'999999_X_batch2-FC2-F1', '999999_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_sparse_cross_dense(self):
"""Tests sparse and dense inputs."""
op = sparse_ops._sparse_cross([
self._sparse_tensor([['batch1-FC1-F1'],
['batch2-FC1-F1', 'batch2-FC1-F2']]),
constant_op.constant([['batch1-FC2-F1', 'batch1-FC2-F2'],
['batch2-FC2-F1', 'batch2-FC2-F2']],
dtypes.string),
])
expected_out = self._sparse_tensor(
[['batch1-FC1-F1_X_batch1-FC2-F1', 'batch1-FC1-F1_X_batch1-FC2-F2'], [
'batch2-FC1-F1_X_batch2-FC2-F1', 'batch2-FC1-F1_X_batch2-FC2-F2',
'batch2-FC1-F2_X_batch2-FC2-F1', 'batch2-FC1-F2_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_integer_sparse_input(self):
"""Tests mixed type sparse and dense inputs."""
op = sparse_ops._sparse_cross([
self._sparse_tensor([[11], [333, 5555]]),
constant_op.constant([['batch1-FC2-F1', 'batch1-FC2-F2'],
['batch2-FC2-F1', 'batch2-FC2-F2']],
dtypes.string),
])
expected_out = self._sparse_tensor(
[['11_X_batch1-FC2-F1', '11_X_batch1-FC2-F2'], [
'333_X_batch2-FC2-F1', '333_X_batch2-FC2-F2',
'5555_X_batch2-FC2-F1', '5555_X_batch2-FC2-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_permutation_3x3x3(self):
"""Tests 3x3x3 permutation."""
op = sparse_ops._sparse_cross([
self._sparse_tensor(
[['batch1-FC1-F1', 'batch1-FC1-F2', 'batch1-FC1-F3']]),
self._sparse_tensor(
[['batch1-FC2-F1', 'batch1-FC2-F2', 'batch1-FC2-F3']]),
self._sparse_tensor(
[['batch1-FC3-F1', 'batch1-FC3-F2', 'batch1-FC3-F3']])
])
expected_out = self._sparse_tensor([[
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F3',
'batch1-FC1-F1_X_batch1-FC2-F2_X_batch1-FC3-F1',
'batch1-FC1-F1_X_batch1-FC2-F2_X_batch1-FC3-F2',
'batch1-FC1-F1_X_batch1-FC2-F2_X_batch1-FC3-F3',
'batch1-FC1-F1_X_batch1-FC2-F3_X_batch1-FC3-F1',
'batch1-FC1-F1_X_batch1-FC2-F3_X_batch1-FC3-F2',
'batch1-FC1-F1_X_batch1-FC2-F3_X_batch1-FC3-F3',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F3',
'batch1-FC1-F2_X_batch1-FC2-F2_X_batch1-FC3-F1',
'batch1-FC1-F2_X_batch1-FC2-F2_X_batch1-FC3-F2',
'batch1-FC1-F2_X_batch1-FC2-F2_X_batch1-FC3-F3',
'batch1-FC1-F2_X_batch1-FC2-F3_X_batch1-FC3-F1',
'batch1-FC1-F2_X_batch1-FC2-F3_X_batch1-FC3-F2',
'batch1-FC1-F2_X_batch1-FC2-F3_X_batch1-FC3-F3',
'batch1-FC1-F3_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F3_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F3_X_batch1-FC2-F1_X_batch1-FC3-F3',
'batch1-FC1-F3_X_batch1-FC2-F2_X_batch1-FC3-F1',
'batch1-FC1-F3_X_batch1-FC2-F2_X_batch1-FC3-F2',
'batch1-FC1-F3_X_batch1-FC2-F2_X_batch1-FC3-F3',
'batch1-FC1-F3_X_batch1-FC2-F3_X_batch1-FC3-F1',
'batch1-FC1-F3_X_batch1-FC2-F3_X_batch1-FC3-F2',
'batch1-FC1-F3_X_batch1-FC2-F3_X_batch1-FC3-F3'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_permutation_3x1x2(self):
"""Tests 3x1x2 permutation."""
op = sparse_ops._sparse_cross([
self._sparse_tensor(
[['batch1-FC1-F1', 'batch1-FC1-F2', 'batch1-FC1-F3']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1', 'batch1-FC3-F2']])
])
expected_out = self._sparse_tensor([[
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F3_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F3_X_batch1-FC2-F1_X_batch1-FC3-F2'
]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_large_batch(self):
"""Tests with large batch size to force multithreading."""
batch_size = 5000
col1 = []
col2 = []
col3 = []
for b in range(batch_size):
col1.append(
['batch%d-FC1-F1' % b, 'batch%d-FC1-F2' % b, 'batch%d-FC1-F3' % b])
col2.append(['batch%d-FC2-F1' % b])
col3.append(['batch%d-FC3-F1' % b, 'batch%d-FC3-F2' % b])
op = sparse_ops._sparse_cross([
self._sparse_tensor(col1), self._sparse_tensor(col2),
self._sparse_tensor(col3)
])
col_out = []
for b in range(batch_size):
col_out.append([
'batch%d-FC1-F1_X_batch%d-FC2-F1_X_batch%d-FC3-F1' % (b, b, b),
'batch%d-FC1-F1_X_batch%d-FC2-F1_X_batch%d-FC3-F2' % (b, b, b),
'batch%d-FC1-F2_X_batch%d-FC2-F1_X_batch%d-FC3-F1' % (b, b, b),
'batch%d-FC1-F2_X_batch%d-FC2-F1_X_batch%d-FC3-F2' % (b, b, b),
'batch%d-FC1-F3_X_batch%d-FC2-F1_X_batch%d-FC3-F1' % (b, b, b),
'batch%d-FC1-F3_X_batch%d-FC2-F1_X_batch%d-FC3-F2' % (b, b, b)
])
expected_out = self._sparse_tensor(col_out)
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_one_column_empty(self):
"""Tests when one column is empty.
The crossed tensor should be empty.
"""
op = sparse_ops._sparse_cross([
self._sparse_tensor([['batch1-FC1-F1', 'batch1-FC1-F2']]),
self._sparse_tensor([], 1),
self._sparse_tensor([['batch1-FC3-F1', 'batch1-FC3-F2']])
])
with self.test_session() as sess:
self._assert_sparse_tensor_empty(sess.run(op))
def test_some_columns_empty(self):
"""Tests when more than one columns are empty.
Cross for the corresponding batch should be empty.
"""
op = sparse_ops._sparse_cross([
self._sparse_tensor([['batch1-FC1-F1', 'batch1-FC1-F2']], 2),
self._sparse_tensor([['batch1-FC2-F1'], ['batch2-FC2-F1']], 2),
self._sparse_tensor([['batch1-FC3-F1', 'batch1-FC3-F2']], 2)
])
expected_out = self._sparse_tensor([[
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F1_X_batch1-FC2-F1_X_batch1-FC3-F2',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F1',
'batch1-FC1-F2_X_batch1-FC2-F1_X_batch1-FC3-F2'
]], 2)
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_all_columns_empty(self):
"""Tests when all columns are empty.
The crossed tensor should be empty.
"""
op = sparse_ops._sparse_cross([
self._sparse_tensor([]), self._sparse_tensor([]),
self._sparse_tensor([])
])
with self.test_session() as sess:
self._assert_sparse_tensor_empty(sess.run(op))
def test_hashed_zero_bucket_no_hash_key(self):
op = sparse_ops._sparse_cross_hashed(
[
self._sparse_tensor([['batch1-FC1-F1']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1']])
])
# Check actual hashed output to prevent unintentional hashing changes.
expected_out = self._sparse_tensor([[1971693436396284976]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_hashed_zero_bucket(self):
op = sparse_ops._sparse_cross_hashed(
[
self._sparse_tensor([['batch1-FC1-F1']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1']])
],
hash_key=sparse_ops._DEFAULT_HASH_KEY + 1)
# Check actual hashed output to prevent unintentional hashing changes.
expected_out = self._sparse_tensor([[4847552627144134031]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
# TODO(sibyl-Aix6ihai): Add benchmark to compare Hashed vs Non-hashed.
def test_hashed_no_hash_key(self):
op = sparse_ops._sparse_cross_hashed(
[
self._sparse_tensor([['batch1-FC1-F1']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1']])
],
num_buckets=100)
# Check actual hashed output to prevent unintentional hashing changes.
expected_out = self._sparse_tensor([[83]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_hashed_output(self):
op = sparse_ops._sparse_cross_hashed(
[
self._sparse_tensor([['batch1-FC1-F1']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1']])
],
num_buckets=100,
hash_key=sparse_ops._DEFAULT_HASH_KEY + 1)
# Check actual hashed output to prevent unintentional hashing changes.
expected_out = self._sparse_tensor([[31]])
with self.test_session() as sess:
self._assert_sparse_tensor_equals(expected_out, sess.run(op))
def test_hashed__has_no_collision(self):
"""Tests that fingerprint concatenation has no collisions."""
    # Although the last 10 bits of 359 and 1024+359 are identical, the
    # crosses should not collide, because the values are fingerprinted
    # and concatenated before bucketing.
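    # Illustrative (not executed): plain modulo bucketing would put both
    # inputs into the same bucket, since 359 % 1024 == (359 + 1024) % 1024.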
t1 = constant_op.constant([[359], [359 + 1024]])
t2 = constant_op.constant([list(range(10)), list(range(10))])
cross = sparse_ops._sparse_cross_hashed(
[t2, t1],
num_buckets=1024,
hash_key=sparse_ops._DEFAULT_HASH_KEY + 1)
cross_dense = sparse_ops.sparse_tensor_to_dense(cross)
with session.Session():
values = cross_dense.eval()
self.assertTrue(numpy.not_equal(values[0], values[1]).all())
def test_hashed_3x1x2(self):
"""Tests 3x1x2 permutation with hashed output."""
op = sparse_ops._sparse_cross_hashed(
[
self._sparse_tensor(
[['batch1-FC1-F1', 'batch1-FC1-F2', 'batch1-FC1-F3']]),
self._sparse_tensor([['batch1-FC2-F1']]),
self._sparse_tensor([['batch1-FC3-F1', 'batch1-FC3-F2']])
],
num_buckets=1000)
with self.test_session() as sess:
out = sess.run(op)
self.assertEqual(6, len(out.values))
self.assertAllEqual([[0, i] for i in range(6)], out.indices)
self.assertTrue(all(x < 1000 and x >= 0 for x in out.values))
all_values_are_different = len(out.values) == len(set(out.values))
self.assertTrue(all_values_are_different)
def _assert_sparse_tensor_empty(self, sp):
    self.assertEqual(0, sp.indices.size)
    self.assertEqual(0, sp.values.size)
    # TODO(zakaria): check if we can ignore the first dim of the shape.
    self.assertEqual(0, sp.dense_shape[1])
def _assert_sparse_tensor_equals(self, sp1, sp2):
self.assertAllEqual(sp1.indices.eval(), sp2.indices)
self.assertAllEqual(sp1.values.eval(), sp2.values)
self.assertAllEqual(sp1.dense_shape.eval(), sp2.dense_shape)
def _sparse_tensor(self, data, batch_size=-1):
"""Generates a SparseTensor.
Args:
data: Should be a list of list of strings or int64. Each item of the outer
list represents a batch. Each item of the batch is a feature of a
specific feature column.
batch_size: optional batch size, especially for cases when data has no
entry for some batches.
Returns:
A SparseTensor.
"""
indices = []
values = []
max_col_count = 0
    for batch_ix, batch in enumerate(data):
      for column_ix, column in enumerate(batch):
        indices.append([batch_ix, column_ix])
        values.append(column)
        max_col_count = max(max_col_count, column_ix + 1)
shape = [batch_size if batch_size != -1 else len(data), max_col_count]
value_type = (dtypes.string if not values or isinstance(values[0], str) else
dtypes.int64)
return sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64, [len(indices), 2]),
constant_op.constant(values, value_type, [len(indices)]),
constant_op.constant(shape, dtypes.int64))
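# Illustrative only: for data=[['a'], ['b', 'c']] the helper above yields a
# SparseTensor with indices [[0, 0], [1, 0], [1, 1]], values ['a', 'b', 'c']
# and dense_shape [2, 2].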
if __name__ == '__main__':
test.main()
|
account-login/dnsagent | refs/heads/master | dnsagent/tests/test_app.py | 1 | from twisted.internet import defer
from dnsagent.app import App
from dnsagent.resolver import ExtendedResolver
from dnsagent.server import ExtendedDNSServerFactory
from dnsagent.tests import iplist, FakeResolver, BaseTestResolver
class TestApp(BaseTestResolver):
server_addr = ('127.0.2.2', 5300)
def setUp(self):
super().setUp()
self.apps = []
@defer.inlineCallbacks
def tearDown(self):
try:
return (yield super().tearDown())
finally:
yield defer.DeferredList(
[app.stop() for app in self.apps], fireOnOneErrback=True,
)
def set_resolver(self, resolver):
server = ExtendedDNSServerFactory(resolver=resolver)
app = App()
self.apps.append(app)
app.start((server, [self.server_addr]))
self.resolver = ExtendedResolver(servers=[self.server_addr])
def test_basic(self):
fake_resolver = FakeResolver()
fake_resolver.set_answer('asdf', '1.1.1.1')
self.set_resolver(fake_resolver)
self.check_a('asdf', iplist('1.1.1.1'))
self.check_a('asdfasdf', fail=True)
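# Remove the imported base class from this module's namespace so the test
# runner does not collect and run its tests a second time here.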
del BaseTestResolver
|
jdugge/QGIS | refs/heads/master | tests/src/python/test_syntactic_sugar.py | 45 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for some syntactic sugar in python
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '12.8.2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
from qgis.testing import unittest, start_app
from qgis.core import (edit,
QgsFeature,
QgsVectorLayer,
QgsEditError
)
start_app()
class TestSyntacticSugar(unittest.TestCase):
def testEdit(self):
"""Test `with edit(layer):` code"""
        ml = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer&field=value:double",
"test_data", "memory")
# Data as list of x, y, id, value
self.assertTrue(ml.isValid())
fields = ml.fields()
# Check insert
with edit(ml):
feat = QgsFeature(fields)
feat['id'] = 1
feat['value'] = 0.9
self.assertTrue(ml.addFeature(feat))
self.assertEqual(next(ml.dataProvider().getFeatures())['value'], 0.9)
# Check update
with edit(ml):
f = next(ml.getFeatures())
f['value'] = 9.9
self.assertTrue(ml.updateFeature(f))
self.assertEqual(next(ml.dataProvider().getFeatures())['value'], 9.9)
# Check for rollBack after exceptions
with self.assertRaises(NameError):
with edit(ml):
f = next(ml.getFeatures())
f['value'] = 3.8
crashycrash() # NOQA
self.assertEqual(next(ml.dataProvider().getFeatures())['value'], 9.9)
self.assertEqual(next(ml.getFeatures())['value'], 9.9)
# Check for `as`
with edit(ml) as l:
f = next(l.getFeatures())
f['value'] = 10
self.assertTrue(l.updateFeature(f))
self.assertEqual(next(ml.dataProvider().getFeatures())['value'], 10)
# Check that we get a QgsEditError exception when the commit fails
with self.assertRaises(QgsEditError):
with edit(ml) as l:
l.rollBack()
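# A minimal sketch of the semantics exercised above (illustrative only, not
# QGIS's actual implementation of qgis.core.edit): commit on a clean exit,
# roll back and re-raise on error, and raise QgsEditError if the commit fails.
class _EditSketch:
    def __init__(self, layer):
        self.layer = layer
    def __enter__(self):
        self.layer.startEditing()
        return self.layer
    def __exit__(self, exc_type, exc_value, tb):
        if exc_type is None:
            if not self.layer.commitChanges():
                raise QgsEditError(self.layer.commitErrors())
        else:
            self.layer.rollBack()
        return False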
if __name__ == "__main__":
unittest.main()
|
tbarbugli/sentry_fork | refs/heads/master | sentry/utils/__init__.py | 2 | import hmac
import logging
try:
import pkg_resources
except ImportError:
pkg_resources = None
import sys
import uuid
from pprint import pformat
from types import ClassType, TypeType
import django
from django.conf import settings as django_settings
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
from django.utils.hashcompat import md5_constructor, sha_constructor
import sentry
from sentry.conf import settings
_FILTER_CACHE = None
def get_filters():
global _FILTER_CACHE
if _FILTER_CACHE is None:
filters = []
for filter_ in settings.FILTERS:
if filter_.endswith('sentry.filters.SearchFilter'):
continue
module_name, class_name = filter_.rsplit('.', 1)
try:
module = __import__(module_name, {}, {}, class_name)
filter_ = getattr(module, class_name)
except Exception:
logger = logging.getLogger('sentry.errors')
logger.exception('Unable to import %s' % (filter_,))
continue
filters.append(filter_)
_FILTER_CACHE = filters
for f in _FILTER_CACHE:
yield f
def get_db_engine(alias='default'):
has_multidb = django.VERSION >= (1, 2)
if has_multidb:
value = django_settings.DATABASES[alias]['ENGINE']
else:
assert alias == 'default', 'You cannot fetch a database engine other than the default on Django < 1.2'
value = django_settings.DATABASE_ENGINE
return value.rsplit('.', 1)[-1]
def construct_checksum(level=logging.ERROR, class_name='', traceback='', message='', **kwargs):
checksum = md5_constructor(str(level))
checksum.update(class_name or '')
if traceback:
traceback = '\n'.join(traceback.split('\n')[:-3])
message = traceback or message
if isinstance(message, unicode):
message = message.encode('utf-8', 'replace')
checksum.update(message)
return checksum.hexdigest()
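# Illustrative grouping behaviour: two events with the same level and class
# whose tracebacks differ only in the last three lines hash to the same
# checksum, because those lines are stripped before hashing.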
def varmap(func, var, context=None):
if context is None:
context = {}
objid = id(var)
if objid in context:
return func('<...>')
context[objid] = 1
if isinstance(var, dict):
ret = dict((k, varmap(func, v, context)) for k, v in var.iteritems())
elif isinstance(var, (list, tuple)):
ret = [varmap(func, f, context) for f in var]
else:
ret = func(var)
del context[objid]
return ret
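# Illustrative (not executed): tuples come back as lists, because the
# list/tuple branch always builds a list:
#   varmap(str, {'a': [1, (2, 3)]}) == {'a': ['1', ['2', '3']]}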
def has_sentry_metadata(value):
try:
return callable(value.__getattribute__("__sentry__"))
except:
return False
def transform(value, stack=[], context=None):
# TODO: make this extendable
if context is None:
context = {}
objid = id(value)
if objid in context:
return '<...>'
context[objid] = 1
transform_rec = lambda o: transform(o, stack + [value], context)
if any(value is s for s in stack):
ret = 'cycle'
elif isinstance(value, (tuple, list, set, frozenset)):
        try:
            ret = type(value)(transform_rec(o) for o in value)
        except TypeError:
            # We may be dealing with a namedtuple, whose constructor
            # takes its fields as positional arguments.
            ret = type(value)(*[transform_rec(o) for o in value])
elif isinstance(value, uuid.UUID):
ret = repr(value)
elif isinstance(value, dict):
ret = dict((k, transform_rec(v)) for k, v in value.iteritems())
elif isinstance(value, unicode):
ret = to_unicode(value)
elif isinstance(value, str):
try:
ret = str(value.decode('utf-8').encode('utf-8'))
except:
ret = to_unicode(value)
elif not isinstance(value, (ClassType, TypeType)) and \
has_sentry_metadata(value):
ret = transform_rec(value.__sentry__())
elif isinstance(value, Promise):
# EPIC HACK
# handles lazy model instances (which are proxy values that dont easily give you the actual function)
pre = value.__class__.__name__[1:]
value = getattr(value, '%s__func' % pre)(*getattr(value, '%s__args' % pre), **getattr(value, '%s__kw' % pre))
return transform(value)
elif not isinstance(value, (int, bool)) and value is not None:
try:
ret = transform(repr(value))
except:
# It's common case that a model's __unicode__ definition may try to query the database
# which if it was not cleaned up correctly, would hit a transaction aborted exception
ret = u'<BadRepr: %s>' % type(value)
else:
ret = value
del context[objid]
return ret
def to_unicode(value):
try:
value = unicode(force_unicode(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = '(Error decoding value)'
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = '(Error decoding value)'
return value
def get_installed_apps():
"""
    Return the set of module names in settings.INSTALLED_APPS.
"""
out = set()
for app in django_settings.INSTALLED_APPS:
out.add(app)
return out
class _Missing(object):
def __repr__(self):
return 'no value'
def __reduce__(self):
return '_missing'
_missing = _Missing()
class cached_property(object):
# This is borrowed from werkzeug : http://bytebucket.org/mitsuhiko/werkzeug-main
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value::
class Foo(object):
@cached_property
def foo(self):
# calculate something important here
return 42
The class has to have a `__dict__` in order for this property to
work.
.. versionchanged:: 0.6
the `writeable` attribute and parameter was deprecated. If a
cached property is writeable or not has to be documented now.
For performance reasons the implementation does not honor the
writeable setting and will always make the property writeable.
"""
# implementation detail: this property is implemented as non-data
# descriptor. non-data descriptors are only invoked if there is
# no entry with the same name in the instance's __dict__.
# this allows us to completely get rid of the access function call
# overhead. If one choses to invoke __get__ by hand the property
# will still work as expected because the lookup logic is replicated
# in __get__ for manual invocation.
def __init__(self, func, name=None, doc=None, writeable=False):
if writeable:
from warnings import warn
warn(DeprecationWarning('the writeable argument to the '
'cached property is a noop since 0.6 '
'because the property is writeable '
'by default for performance reasons'))
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
def __get__(self, obj, type=None):
if obj is None:
return self
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
def get_versions(module_list=None):
if not module_list:
module_list = django_settings.INSTALLED_APPS + ['django']
ext_module_list = set()
for m in module_list:
parts = m.split('.')
ext_module_list.update('.'.join(parts[:idx]) for idx in xrange(1, len(parts)+1))
versions = {}
for module_name in ext_module_list:
__import__(module_name)
app = sys.modules[module_name]
if hasattr(app, 'get_version'):
get_version = app.get_version
if callable(get_version):
version = get_version()
else:
version = get_version
elif hasattr(app, 'VERSION'):
version = app.VERSION
elif hasattr(app, '__version__'):
version = app.__version__
elif pkg_resources:
# pull version from pkg_resources if distro exists
try:
version = pkg_resources.get_distribution(module_name).version
except pkg_resources.DistributionNotFound:
continue
else:
continue
if not version:
continue
if isinstance(version, (list, tuple)):
version = '.'.join(str(o) for o in version)
versions[module_name] = version
return versions
def shorten(var):
var = transform(var)
if isinstance(var, basestring) and len(var) > settings.MAX_LENGTH_STRING:
var = var[:settings.MAX_LENGTH_STRING] + '...'
elif isinstance(var, (list, tuple, set, frozenset)) and len(var) > settings.MAX_LENGTH_LIST:
# TODO: we should write a real API for storing some metadata with vars when
# we get around to doing ref storage
# TODO: when we finish the above, we should also implement this for dicts
var = list(var)[:settings.MAX_LENGTH_LIST] + ['...', '(%d more elements)' % (len(var) - settings.MAX_LENGTH_LIST,)]
return var
def is_float(var):
try:
float(var)
except ValueError:
return False
return True
def get_signature(message, timestamp):
return hmac.new(settings.KEY, '%s %s' % (timestamp, message), sha_constructor).hexdigest()
def get_auth_header(signature, timestamp, client):
    return 'Sentry sentry_signature=%s, sentry_timestamp=%s, sentry_client=%s' % (
        signature,
        timestamp,
        client,
    )
def parse_auth_header(header):
return dict(map(lambda x: x.strip().split('='), header.split(' ', 1)[1].split(',')))
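# Illustrative round trip (not executed):
#   parse_auth_header('Sentry sentry_signature=abc, sentry_timestamp=1, sentry_client=2.0')
#   == {'sentry_signature': 'abc', 'sentry_timestamp': '1', 'sentry_client': '2.0'}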
class MockDjangoRequest(object):
GET = {}
POST = {}
META = {}
COOKIES = {}
FILES = {}
raw_post_data = ''
url = ''
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __repr__(self):
# Since this is called as part of error handling, we need to be very
# robust against potentially malformed input.
try:
get = pformat(self.GET)
except:
get = '<could not parse>'
try:
post = pformat(self.POST)
except:
post = '<could not parse>'
try:
cookies = pformat(self.COOKIES)
except:
cookies = '<could not parse>'
try:
meta = pformat(self.META)
except:
meta = '<could not parse>'
return '<Request\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
(get, post, cookies, meta)
def build_absolute_uri(self): return self.url
|
levythu/thefuck | refs/heads/master | tests/rules/test_fix_file.py | 12 | import pytest
import os
from thefuck.rules.fix_file import match, get_new_command
from tests.utils import Command
# (script, file, line, col (or None), stdout, stderr)
tests = (
('gcc a.c', 'a.c', 3, 1, '',
"""
a.c: In function 'main':
a.c:3:1: error: expected expression before '}' token
}
^
"""),
('clang a.c', 'a.c', 3, 1, '',
"""
a.c:3:1: error: expected expression
}
^
"""),
('perl a.pl', 'a.pl', 3, None, '',
"""
syntax error at a.pl line 3, at EOF
Execution of a.pl aborted due to compilation errors.
"""),
('perl a.pl', 'a.pl', 2, None, '',
"""
Search pattern not terminated at a.pl line 2.
"""),
('sh a.sh', 'a.sh', 2, None, '',
"""
a.sh: line 2: foo: command not found
"""),
('zsh a.sh', 'a.sh', 2, None, '',
"""
a.sh:2: command not found: foo
"""),
('bash a.sh', 'a.sh', 2, None, '',
"""
a.sh: line 2: foo: command not found
"""),
('rustc a.rs', 'a.rs', 2, 5, '',
"""
a.rs:2:5: 2:6 error: unexpected token: `+`
a.rs:2 +
^
"""),
('cargo build', 'src/lib.rs', 3, 5, '',
"""
Compiling test v0.1.0 (file:///tmp/fix-error/test)
src/lib.rs:3:5: 3:6 error: unexpected token: `+`
src/lib.rs:3 +
^
Could not compile `test`.
To learn more, run the command again with --verbose.
"""),
('python a.py', 'a.py', 2, None, '',
"""
File "a.py", line 2
+
^
SyntaxError: invalid syntax
"""),
('python a.py', 'a.py', 8, None, '',
"""
Traceback (most recent call last):
File "a.py", line 8, in <module>
match("foo")
File "a.py", line 5, in match
m = re.search(None, command)
File "/usr/lib/python3.4/re.py", line 170, in search
return _compile(pattern, flags).search(string)
File "/usr/lib/python3.4/re.py", line 293, in _compile
raise TypeError("first argument must be string or compiled pattern")
TypeError: first argument must be string or compiled pattern
"""),
('ruby a.rb', 'a.rb', 3, None, '',
"""
a.rb:3: syntax error, unexpected keyword_end
"""),
('lua a.lua', 'a.lua', 2, None, '',
"""
lua: a.lua:2: unexpected symbol near '+'
"""),
('fish a.sh', '/tmp/fix-error/a.sh', 2, None, '',
"""
fish: Unknown command 'foo'
/tmp/fix-error/a.sh (line 2): foo
^
"""),
('./a', './a', 2, None, '',
"""
awk: ./a:2: BEGIN { print "Hello, world!" + }
awk: ./a:2: ^ syntax error
"""),
('llc a.ll', 'a.ll', 1, 2, '',
"""
llc: a.ll:1:2: error: expected top-level entity
+
^
"""),
('go build a.go', 'a.go', 1, 2, '',
"""
can't load package:
a.go:1:2: expected 'package', found '+'
"""),
('make', 'Makefile', 2, None, '',
"""
bidule
make: bidule: Command not found
Makefile:2: recipe for target 'target' failed
make: *** [target] Error 127
"""),
('git st', '/home/martin/.config/git/config', 1, None, '',
"""
fatal: bad config file line 1 in /home/martin/.config/git/config
"""),
('node fuck.js asdf qwer', '/Users/pablo/Workspace/barebones/fuck.js', '2', 5, '',
"""
/Users/pablo/Workspace/barebones/fuck.js:2
conole.log(arg); // this should read console.log(arg);
^
ReferenceError: conole is not defined
at /Users/pablo/Workspace/barebones/fuck.js:2:5
at Array.forEach (native)
at Object.<anonymous> (/Users/pablo/Workspace/barebones/fuck.js:1:85)
at Module._compile (module.js:460:26)
at Object.Module._extensions..js (module.js:478:10)
at Module.load (module.js:355:32)
at Function.Module._load (module.js:310:12)
at Function.Module.runMain (module.js:501:10)
at startup (node.js:129:16)
at node.js:814:3
"""),
('pep8', './tests/rules/test_systemctl.py', 17, 80,
"""
./tests/rules/test_systemctl.py:17:80: E501 line too long (93 > 79 characters)
./tests/rules/test_systemctl.py:18:80: E501 line too long (103 > 79 characters)
./tests/rules/test_whois.py:20:80: E501 line too long (89 > 79 characters)
./tests/rules/test_whois.py:22:80: E501 line too long (83 > 79 characters)
""", ''),
('py.test', '/home/thefuck/tests/rules/test_fix_file.py', 218, None,
"""
monkeypatch = <_pytest.monkeypatch.monkeypatch object at 0x7fdb76a25b38>
test = ('fish a.sh', '/tmp/fix-error/a.sh', 2, None, '', "\\nfish: Unknown command 'foo'\\n/tmp/fix-error/a.sh (line 2): foo\\n ^\\n")
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command(monkeypatch, test):
> mocker.patch('os.path.isfile', return_value=True)
E NameError: name 'mocker' is not defined
/home/thefuck/tests/rules/test_fix_file.py:218: NameError
""", ''),
)
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_match(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=True)
monkeypatch.setenv('EDITOR', 'dummy_editor')
assert match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_no_editor(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=True)
if 'EDITOR' in os.environ:
monkeypatch.delenv('EDITOR')
assert not match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_not_file(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=False)
monkeypatch.setenv('EDITOR', 'dummy_editor')
assert not match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=True)
monkeypatch.setenv('EDITOR', 'dummy_editor')
cmd = Command(script=test[0], stdout=test[4], stderr=test[5])
    # The old two-argument ``Settings({})`` call predates the settings
    # fixture; restored here with the current one-argument API.
    assert (get_new_command(cmd) ==
            'dummy_editor {} +{} && {}'.format(test[1], test[2], test[0]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command_with_settings(mocker, monkeypatch, test, settings):
mocker.patch('os.path.isfile', return_value=True)
monkeypatch.setenv('EDITOR', 'dummy_editor')
cmd = Command(script=test[0], stdout=test[4], stderr=test[5])
settings.fixcolcmd = '{editor} {file} +{line}:{col}'
if test[3]:
assert (get_new_command(cmd) ==
'dummy_editor {} +{}:{} && {}'.format(test[1], test[2], test[3], test[0]))
else:
assert (get_new_command(cmd) ==
'dummy_editor {} +{} && {}'.format(test[1], test[2], test[0]))
|
bwbeach/ansible | refs/heads/devel | lib/ansible/utils/module_docs_fragments/openstack.py | 97 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard openstack documentation fragment
DOCUMENTATION = '''
options:
cloud:
description:
- Named cloud to operate against. Provides default values for I(auth) and
I(auth_type). This parameter is not needed if I(auth) is provided or if
OpenStack OS_* environment variables are present.
required: false
auth:
description:
- Dictionary containing auth information as needed by the cloud's auth
plugin strategy. For the default I(password) plugin, this would contain
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
requires. This parameter is not needed if a named cloud is provided or
OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
- Name of the auth plugin to use. If the cloud uses something other than
password authentication, the name of the plugin should be indicated here
and the contents of the I(auth) parameter should be updated accordingly.
required: false
default: password
region_name:
description:
- Name of the region.
required: false
availability_zone:
description:
- Name of the availability zone.
required: false
wait:
description:
- Should ansible wait until the requested resource is complete.
required: false
default: "yes"
choices: ["yes", "no"]
timeout:
description:
- How long should ansible wait for the requested resource.
required: false
default: 180
api_timeout:
description:
- How long should the socket layer wait before timing out for API calls.
If this is omitted, nothing will be passed to the requests library.
required: false
default: None
validate_certs:
description:
- Whether or not SSL API requests should be verified.
required: false
default: True
aliases: ['verify']
cacert:
description:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction
required: false
default: None
key:
description:
- A path to a client key to use as part of the SSL transaction
required: false
default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
choices: [public, internal, admin]
required: false
default: public
requirements:
- python >= 2.7
- shade
notes:
- The standard OpenStack environment variables, such as C(OS_USERNAME)
    may be used instead of providing explicit values.
- Auth information is driven by os-client-config, which means that values
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
plays. More information can be found at
U(http://docs.openstack.org/developer/os-client-config)
'''
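    # Illustrative only (placeholder values, not part of the fragment): a
    # play using a module with this fragment might pass
    #   auth:
    #     auth_url: https://identity.example.com:5000/v3
    #     username: demo
    #     password: secret
    #     project_name: demo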
|
matthewdeanmartin/kata-python | refs/heads/master | kata/spelling_game/__main__.py | 1 | # coding=utf-8
"""
Executes when the module is run via ``python -m``.
"""
import kata.spelling_game.main as m
m.run()
|
muelli/gnome-keysign | refs/heads/master | keysign/scan_barcode.py | 2 | #!/usr/bin/env python
# Copyright 2014, 2015 Tobias Mueller <[email protected]>
# Copyright 2014 Andrei Macavei <[email protected]>
#
# This file is part of GNOME Keysign.
#
# GNOME Keysign is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GNOME Keysign is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNOME Keysign. If not, see <http://www.gnu.org/licenses/>.
import logging
import signal
import sys
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('Gtk', '3.0')
from gi.repository import GObject
from gi.repository import Gst
from gi.repository import Gtk, GLib
# Because of https://bugzilla.gnome.org/show_bug.cgi?id=698005
from gi.repository import Gtk, GdkPixbuf
from gi.repository import GstVideo
from gi.repository import Gdk
log = logging.getLogger(__name__)
class BarcodeReaderGTK(Gtk.Box):
__gsignals__ = {
str('barcode'): (GObject.SignalFlags.RUN_LAST, None,
(str, # The barcode string
Gst.Message.__gtype__, # The GStreamer message itself
GdkPixbuf.Pixbuf.__gtype__, # The pixbuf which caused
# the above string to be decoded
),
)
}
def __init__(self, *args, **kwargs):
super(BarcodeReaderGTK, self).__init__(*args, **kwargs)
self.connect('unmap', self.on_unmap)
self.connect('map', self.on_map)
def on_message(self, bus, message):
#log.debug("Message: %s", message)
if message:
struct = message.get_structure()
if struct:
struct_name = struct.get_name()
#log.debug('Message name: %s', struct_name)
if struct_name == 'GstMessageError':
err, debug = message.parse_error()
log.error('GstError: %s, %s', err, debug)
elif struct_name == 'GstMessageWarning':
err, debug = message.parse_warning()
log.warning('GstWarning: %s, %s', err, debug)
elif struct_name == 'barcode':
self.timestamp = struct.get_clock_time("timestamp")[1]
log.debug ("at %s", self.timestamp)
assert struct.has_field('symbol')
barcode = struct.get_string('symbol')
log.info("Read Barcode: {}".format(barcode))
pixbuf = None
if struct.has_field ("frame"):
# This is the new zbar, which posts the frame along
# with the barcode.
sample = struct.get_value ("frame")
pixbuf = gst_sample_to_pixbuf(sample)
self.emit("barcode", barcode, message, pixbuf)
else:
                        # Old zbar (< 1.6) does not attach the frame, so we raise
raise Exception("Zbar version is not what we expected")
def run(self):
p = "autovideosrc \n"
#p = "uridecodebin uri=file:///tmp/qr.png "
#p = "uridecodebin uri=file:///tmp/v.webm "
p += " ! tee name=t \n"
p += " t. ! queue ! videoconvert \n"
p += " ! zbar cache=true attach_frame=true \n"
p += " ! fakesink \n"
p += " t. ! queue ! videoconvert \n"
p += (" ! gtksink "
"sync=false "
"name=imagesink "
#"max-lateness=2000000000000 "
"enable-last-sample=false "
"\n"
)
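        # The tee splits the stream: one branch feeds zbar for decoding,
        # the other feeds the gtksink for the live preview.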
pipeline = p
log.info("Launching pipeline %s", pipeline)
pipeline = Gst.parse_launch(pipeline)
self.imagesink = pipeline.get_by_name('imagesink')
self.gtksink_widget = self.imagesink.get_property("widget")
log.info("About to remove children from %r", self)
for child in self.get_children():
log.info("About to remove child: %r", child)
self.remove(child)
# self.gtksink_widget.set_property("expand", False)
log.info("Adding sink widget: %r", self.gtksink_widget)
#self.add(self.gtksink_widget)
self.pack_start(self.gtksink_widget, True, True, 0)
self.gtksink_widget.show()
self.pipeline = pipeline
bus = pipeline.get_bus()
bus.connect('message', self.on_message)
bus.add_signal_watch()
pipeline.set_state(Gst.State.PLAYING)
def pause(self):
self.pipeline.set_state(Gst.State.PAUSED)
def on_map(self, *args, **kwargs):
'''It seems this is called when the widget is becoming visible'''
self.run()
def on_unmap(self, *args, **kwargs):
'''Hopefully called when this widget is hidden,
e.g. when the tab of a notebook has changed'''
self.pipeline.set_state(Gst.State.PAUSED)
# Actually, we stop the thing for real
self.pipeline.set_state(Gst.State.NULL)
def do_barcode(self, barcode, message, image):
"This is called by GObject, I think"
log.debug("Emitting a barcode signal %s, %s, %r",
barcode, message, image)
class ReaderApp(Gtk.Application):
'''A simple application for scanning a bar code
It makes use of the BarcodeReaderGTK class and connects to
its on_barcode signal.
You need to have called Gst.init() before creating a
BarcodeReaderGTK.
'''
def __init__(self, *args, **kwargs):
super(ReaderApp, self).__init__(*args, **kwargs)
self.connect('activate', self.on_activate)
def on_activate(self, data=None):
window = Gtk.ApplicationWindow()
window.set_title("Gtk Gst Barcode Reader")
reader = BarcodeReaderGTK()
reader.connect('barcode', self.on_barcode)
window.add(reader)
window.show_all()
self.add_window(window)
def on_barcode(self, reader, barcode, message, image):
'''All we do is logging the decoded barcode'''
logging.info('Barcode decoded: %s', barcode)
class SimpleInterface(ReaderApp):
'''We tweak the UI of the demo ReaderApp a little'''
def on_activate(self, *args, **kwargs):
window = Gtk.ApplicationWindow()
window.set_title("Simple Barcode Reader")
window.set_default_size(400, 300)
vbox = Gtk.Box(Gtk.Orientation.HORIZONTAL, 0)
vbox.set_margin_top(3)
vbox.set_margin_bottom(3)
window.add(vbox)
reader = BarcodeReaderGTK()
reader.connect('barcode', self.on_barcode)
vbox.pack_start(reader, True, True, 0)
self.reader = reader
#self.image = Gtk.Image()
# FIXME: We could show a default image like "no barcode scanned just yet"
self.image = ScalingImage()
self.imagebox = Gtk.Box() #expand=True)
self.imagebox.add(self.image)
self.imagebox.show()
vbox.pack_end(self.imagebox, True, True, 0)
self.playButtonImage = Gtk.Image()
self.playButtonImage.set_from_stock("gtk-media-play", Gtk.IconSize.BUTTON)
self.playButton = Gtk.Button.new()
self.playButton.add(self.playButtonImage)
self.playButton.connect("clicked", self.playToggled)
vbox.pack_end(self.playButton, False, False, 0)
window.show_all()
self.add_window(window)
def playToggled(self, w):
self.reader.pause()
def on_barcode(self, reader, barcode, message, pixbuf):
log.info("Barcode!!1 %r", barcode)
# Hrm. Somehow, the Gst Widget is allocating
# space relatively aggressively. Our imagebox on
# the right side does not get any space.
#self.imagebox.remove(self.image)
#self.image = ScalingImage(pixbuf)
#self.imagebox.pack_start(self.image, True, True, 0)
#self.image.set_property('expand', True)
#self.image.show()
self.image.set_from_pixbuf(pixbuf)
# So we just show a window instead...
w = Gtk.Window()
w.add(ScalingImage(pixbuf))
w.show_all()
return False
def gst_sample_to_pixbuf(sample):
'''Converts the image from a given GstSample to a GdkPixbuf'''
caps = Gst.Caps.from_string("video/x-raw,format=RGBA")
converted_sample = GstVideo.video_convert_sample(sample, caps, Gst.CLOCK_TIME_NONE)
buffer = converted_sample.get_buffer()
pixbuf = buffer.extract_dup(0, buffer.get_size())
caps = converted_sample.get_caps()
struct = caps.get_structure(0)
colorspace = GdkPixbuf.Colorspace.RGB
alpha = True
bps = 8
width_struct = struct.get_int("width")
assert width_struct[0]
height_struct = struct.get_int("height")
assert height_struct[0]
original_width = width_struct[1]
original_height = height_struct[1]
rowstride_struct = struct.get_int("stride")
    if rowstride_struct[0]:
        # The stride information might be hidden in the struct.
        # For now this branch is not taken, probably because the field
        # name is wrong.
        rowstride = rowstride_struct[1]
    else:
        # Integer division keeps the rowstride an int on Python 3, too.
        rowstride = bps // 8 * 4 * original_width
gdkpixbuf = GdkPixbuf.Pixbuf.new_from_bytes(
GLib.Bytes.new_take(pixbuf),
colorspace, alpha, bps, original_width,
original_height, rowstride)
return gdkpixbuf
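# Illustrative use (not executed; assumes ``sample`` is a GstSample such as
# the frame attached to the zbar message above):
#   pixbuf = gst_sample_to_pixbuf(sample)
#   pixbuf.savev('/tmp/frame.png', 'png', [], [])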
class ScalingImage(Gtk.DrawingArea):
def __init__(self, pixbuf=None, width=None, height=None, rowstride=None):
self.pixbuf = pixbuf
self.rowstride = rowstride or None
super(ScalingImage, self).__init__()
#self.set_property("width_request", 400)
#self.set_property("height_request", 400)
#self.set_property("margin", 10)
self.set_property("expand", True)
def set_from_pixbuf(self, pixbuf):
self.pixbuf = pixbuf
self.queue_draw()
# def do_size_allocate(self, allocation):
# log.debug("Size Allocate %r", allocation)
# log.debug("w: %r h: %r", allocation.width, allocation.height)
# self.queue_draw()
def do_draw(self, cr, pixbuf=None):
log.debug('Drawing ScalingImage! %r', self)
pixbuf = pixbuf or self.pixbuf
if not pixbuf:
log.info('No pixbuf to draw! %r', pixbuf)
else:
original_width = pixbuf.get_width()
original_height = pixbuf.get_height()
assert original_width > 0
assert original_height > 0
# Scale the pixbuf down to whatever space we have
allocation = self.get_allocation()
widget_width = allocation.width
widget_height = allocation.height
# I think we might not need this calculation
#widget_size = min(widget_width, widget_height)
log.info('Allocated size: %s, %s', widget_width, widget_height)
# Fill in background
cr.save()
#Gtk.render_background(self.get_style_context(),
# cr, 0, 0, widget_width, widget_height)
#cr.set_source_rgb(1, 1, 1)
#cr.paint()
# Centering and scaling the image to fit the widget
cr.translate(widget_width / 2.0, widget_height / 2.0)
scale = min(widget_width / float(original_width), widget_height / float(original_height))
cr.scale(scale, scale)
cr.translate(-original_width / 2.0, -original_height / 2.0)
# Note: This function is very inefficient
# (one could cache the resulting pattern or image surface)!
Gdk.cairo_set_source_pixbuf(cr, pixbuf, 0, 0)
# Should anyone want to set filters, this is the way to do it.
#pattern = cr.get_source()
#pattern.set_filter(cairo.FILTER_NEAREST)
cr.paint()
cr.restore()
return
#super(ScalingImage, self).do_draw(cr)
def main():
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG,
format='%(name)s (%(levelname)s): %(message)s')
# We need to have GStreamer initialised before creating a BarcodeReader
Gst.init(sys.argv)
app = SimpleInterface()
try:
# Exit the mainloop if Ctrl+C is pressed in the terminal.
GLib.unix_signal_add_full(GLib.PRIORITY_HIGH, signal.SIGINT, lambda *args : app.quit(), None)
except AttributeError:
# Whatever, it is only to enable Ctrl+C anyways
pass
app.run()
if __name__ == '__main__':
main()
|
guptaankita/python-novaclient | refs/heads/master | novaclient/v2/virtual_interfaces.py | 17 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Virtual Interfaces (1.1 extension).
"""
from novaclient import base
class VirtualInterface(base.Resource):
    def __repr__(self):
        # A bare ``pass`` would make repr() return None and raise TypeError;
        # fall back to 'unknown' when the resource carries no id attribute.
        return '<VirtualInterface: %s>' % getattr(self, 'id', 'unknown')
class VirtualInterfaceManager(base.ManagerWithFind):
resource_class = VirtualInterface
def list(self, instance_id):
return self._list('/servers/%s/os-virtual-interfaces' % instance_id,
'virtual_interfaces')
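# Illustrative use (assumes an authenticated novaclient Client instance):
#   for vif in nova.virtual_interfaces.list(server_id):
#       print(vif.id)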
|
thekingofkings/chicago-crime | refs/heads/master | python/query.py | 2 | # -*- coding: utf-8 -*-
"""
Created on Fri Jun 5 14:44:49 2015
@author: feiwu
"""
class Point:
def __init__(self, lat, lon):
self.lat = lat
self.lon = lon
def make_key(self):
        return '{},{}'.format(self.lat, self.lon)
class POI:
    def __init__(self, name, pid, lat, lon, cat, checkin_count, user_count):
self.name = name
self.pid = pid
self.location = Point(lat,lon)
self.cat = cat
self.checkin_count = checkin_count
self.user_count = user_count
        self.extra_id = ''  # for debugging purposes
self.popularity = dict()
def add_extra(self, extra_id):
self.extra_id = extra_id
    def add_density(self, key, den):
self.popularity[key] = den
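# Illustrative use (placeholder values):
#   poi = POI('Cloud Gate', 'poi-1', 41.8827, -87.6233, 'Plaza', 1000, 400)
#   poi.add_density('2015-06', 0.7)
#   poi.location.make_key()  # -> '41.8827,-87.6233'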
|
av8ramit/tensorflow | refs/heads/master | tensorflow/python/eager/backprop_test.py | 8 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import custom_gradient
from tensorflow.python.eager import tape
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import training
class BackpropTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testAggregateGradients(self):
def fn(x):
ind1 = constant_op.constant(np.array([0, 1]))
ind2 = constant_op.constant(np.array([2, 3]))
ind3 = constant_op.constant(np.array([1, 3]))
# A mixture of IndexedSlices and dense tensor to aggregate.
g1 = embedding_ops.embedding_lookup(x, ind1)
g2 = embedding_ops.embedding_lookup(x, ind2)
g3 = embedding_ops.embedding_lookup(x, ind3)
g4 = math_ops.reduce_sum(x * constant_op.constant(2.0))
return g1 * g2 * g3 * g4
var_np = np.random.rand(4, 2).astype(np.float32)
var = constant_op.constant(var_np)
grad = backprop.gradients_function(fn, [0])(var)[0]
grad = self.evaluate(ops.convert_to_tensor(grad))
with context.graph_mode(), self.test_session():
tf_var = array_ops.constant(var_np, dtypes.float32)
tf_ind1 = array_ops.constant([0, 1])
tf_ind2 = array_ops.constant([2, 3])
tf_ind3 = array_ops.constant([1, 3])
tf_g1 = embedding_ops.embedding_lookup(tf_var, tf_ind1)
tf_g2 = embedding_ops.embedding_lookup(tf_var, tf_ind2)
tf_g3 = embedding_ops.embedding_lookup(tf_var, tf_ind3)
tf_g4 = math_ops.reduce_sum(tf_var * 2.0, reduction_indices=(0, 1))
tf_y = tf_g1 * tf_g2 * tf_g3 * tf_g4
tf_grad = gradients.gradients(tf_y, [tf_var])[0]
tf_dense_grad = math_ops.unsorted_segment_sum(
tf_grad.values, tf_grad.indices, tf_grad.dense_shape[0])
self.assertAllClose(grad, tf_dense_grad.eval())
def testImplicitGradWithResourceVariable(self):
x = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(1.0), name='x')
def fn():
tape.watch_variable(x)
b = constant_op.constant(2.0)
c = math_ops.add(x.value(), b)
return math_ops.add(c, constant_op.constant(3.0))
grads_and_vars = backprop.implicit_grad(fn)()
self.assertAllEqual(grads_and_vars[0][0], 1.0)
self.assertAllEqual(id(grads_and_vars[0][1]), id(x))
def testDy(self):
def f(x):
return x
grad_fn = backprop.gradients_function(f)
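    # dy seeds the backward pass with an initial output gradient; for the
    # identity function the input gradient is just dy, hence 2.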
self.assertAllEqual(2., grad_fn(1., dy=2.)[0])
def testErrors(self):
@custom_gradient.custom_gradient
def f(x):
def grad(_):
raise RuntimeError('x')
return x, grad
# TODO(apassos) raise the right error here
with self.assertRaises(RuntimeError):
backprop.gradients_function(f)(constant_op.constant(1.0))
def testImplicitGradOverEmbeddingLookup(self):
batch_size = 8
embedding_size = 512
vocab_size = 1000
lrn_rate = 0.1
random_init = random_ops.random_uniform([vocab_size, embedding_size])
x = array_ops.ones((batch_size), dtypes.int64)
embedding = resource_variable_ops.ResourceVariable(
initial_value=random_init, dtype=dtypes.float32, name='embedding')
def f():
tape.watch_variable(embedding)
embedded_x = embedding_ops.embedding_lookup(embedding, x)
return constant_op.constant(1.0, dtypes.float32) - embedded_x
grad = backprop.implicit_grad(f)()[0][0]
opt = training.GradientDescentOptimizer(lrn_rate)
with context.graph_mode(), self.test_session():
tf_x = array_ops.ones((batch_size), dtypes.int64)
# TODO(ashankar,apassos): Change to ResourceVariable.
tf_embedding = variables.Variable(
random_init.numpy(), name='tf_embedding')
tf_embedded_x = embedding_ops.embedding_lookup(tf_embedding, tf_x)
tf_y = 1.0 - tf_embedded_x
tf_grad = gradients.gradients(tf_y, [tf_embedding])[0]
tf_opt = training.GradientDescentOptimizer(0.1)
tf_embedding.initializer.run()
self.assertAllClose(tf_grad.indices.eval(), grad.indices)
self.assertAllClose(tf_grad.values.eval(), grad.values)
tf_opt.apply_gradients([(tf_grad, tf_embedding)]).run()
expected = tf_embedding.eval()
opt.apply_gradients([(grad, embedding)])
self.assertAllClose(expected, embedding.read_value())
def testImplicitGradOrdering(self):
v0 = resource_variable_ops.ResourceVariable(1.0)
v1 = resource_variable_ops.ResourceVariable(2.0)
def f():
x = v1 * v1
y = v0 * v0
return x + y
grads = backprop.implicit_grad(f)()
ordered_variables = [x[1] for x in grads]
self.assertTrue(ordered_variables[0] is v0)
self.assertTrue(ordered_variables[1] is v1)
@test_util.assert_no_new_tensors
def testGradientNone(self):
def loss(x, l):
return math_ops.reduce_mean(
nn_ops.softmax_cross_entropy_with_logits(logits=x, labels=l),
constant_op.constant([0]))
logits = constant_op.constant([[0.0, 0.0]])
labels = constant_op.constant([[1.0, 0.0]])
# softmax_cross_entropy_with_logits returns two outputs and in this case the
# gradient wrt the second is None.
g, = backprop.gradients_function(loss, [0])(logits, labels)
self.assertAllEqual(g.numpy(), [[-0.5, 0.5]])
@test_util.assert_no_new_tensors
def testSecondGrad(self):
def first(x):
l = constant_op.constant([[0.0]])
x = nn_ops.softmax_cross_entropy_with_logits(labels=l, logits=x)
x = math_ops.reduce_sum(x, constant_op.constant([0]))
return x
def second(x):
grad = backprop.gradients_function(first, [0])(x)[0]
return math_ops.reduce_sum(grad, constant_op.constant([0]))
f = constant_op.constant([[0.1]])
grad = backprop.gradients_function(second, [0])(f)[0]
self.assertAllEqual([[0.0]], grad)
@test_util.assert_no_new_tensors
def testMakeVJP(self):
def f(x):
return x * x
wrapped_fn = backprop.make_vjp(f)
result, vjp = wrapped_fn(constant_op.constant(3.0))
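    # vjp scales the seed by df/dx = 2x; at x = 3, a seed of 2 yields 12.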
self.assertAllEqual(result, 9.0)
self.assertAllEqual(vjp(2.0)[0], 12.0)
@test_util.assert_no_new_tensors
def testGradGrad(self):
def sq(x):
return x * x
def grad(x):
value = backprop.gradients_function(sq, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(3.0))[0], 2.0)
@test_util.assert_no_new_tensors
def testGradGradExp(self):
def grad(x):
value = backprop.gradients_function(math_ops.exp, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(0.0))[0], 1.0)
@test_util.assert_no_new_tensors
def testStopGradient(self):
grad = backprop.gradients_function(
lambda x: array_ops.stop_gradient(math_ops.argmax(x)))
self.assertAllEqual(grad([0.0])[0], None)
@test_util.assert_no_new_tensors
def testArgmax(self):
def argmax(x):
i = math_ops.argmax(x)
return array_ops.stop_gradient(i)
grad = backprop.gradients_function(argmax)
self.assertAllEqual(grad([0.0])[0], None)
@test_util.assert_no_new_tensors
def testGPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
def fn(x):
with context.device('/gpu:0'):
b = constant_op.constant(2.0)
c = math_ops.add(x.gpu(), b)
# TODO(apassos): remove cpu below by making TensorVSPace aware
# of devices.
return math_ops.add(c, constant_op.constant(3.0)).cpu()
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testGPUImplicitGrad(self):
if not context.context().num_gpus():
self.skipTest('No GPU found')
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(
constant_op.constant(1.0), name='v')
def f():
with context.device('gpu:0'):
tape.watch_variable(v)
return v.read_value()
self.assertEqual(
backprop.implicit_grad(f)()[0][0].cpu().numpy(), 1.0)
@test_util.assert_no_new_tensors
def testCPU(self):
def fn(x):
b = constant_op.constant(2.0)
c = math_ops.add(x, b)
return math_ops.add(c, constant_op.constant(3.0))
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testTensorCopyGPU2CPU2GPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
def f(a, b):
return a.cpu() + b.cpu()
with context.device('/gpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testEmptyParams(self):
def fn(a, b):
return a * b
x = constant_op.constant(1.0)
y = constant_op.constant(2.0)
dx, dy = backprop.gradients_function(fn)(x, y)
self.assertAllEqual(dx, y.numpy())
self.assertAllEqual(dy, x.numpy())
@test_util.assert_no_new_tensors
def testUnconnectedNone(self):
v = resource_variable_ops.ResourceVariable(
1.0, name='testUnconnectedNone')
def f():
v.read_value()
return constant_op.constant(1.0)
self.assertEqual(backprop.implicit_grad(f)()[0][0], None)
@test_util.assert_no_new_tensors
def testGradientTape(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape() as gg:
gg.watch(y)
z = 2 * y
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(inner_grad.numpy(), 2.0)
y += inner_grad
grad = g.gradient(y, [x])[0]
self.assertEqual(grad.numpy(), 6.0)
@test_util.assert_no_new_tensors
def testGradientTapeGradientCalledMultipleTimes(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
g.gradient(z, [x])
with self.assertRaisesRegexp(
RuntimeError, 'GradientTape.gradient can only be called once'):
g.gradient(y, [x])
@test_util.assert_no_new_tensors
def testPersistentTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
dz_dx = g.gradient(z, [x])[0]
self.assertEqual(dz_dx.numpy(), 4*3*3*3)
dy_dx = g.gradient(y, [x])[0]
self.assertEqual(dy_dx.numpy(), 2*3)
del g
@test_util.assert_no_new_tensors
def testPersistentNestedTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape(persistent=True) as gg:
gg.watch(y)
z = 2 * y
for _ in range(2):
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(inner_grad.numpy(), 2.0)
y += inner_grad
del gg
grad = g.gradient(y, [x])[0]
self.assertEqual(grad.numpy(), 6.0)
grad = g.gradient(z, [x])[0]
self.assertEqual(grad.numpy(), 12.0)
del g
@test_util.assert_no_new_tensors
def testGradientTapeVariable(self):
v = resource_variable_ops.ResourceVariable(1.0, name='v')
with backprop.GradientTape() as g:
y = v * v
grad = g.gradient(y, [v])[0]
self.assertAllEqual(grad, 2.0)
@test_util.assert_no_new_tensors
def testEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grads_fn = backprop.val_and_grad_function(fn)
x = 2.0
y = 3.0
val, (dx, dy) = val_and_grads_fn(x, y)
self.assertAllClose(val, x * y)
self.assertAllEqual(dx, y)
self.assertAllEqual(dy, x)
@test_util.assert_no_new_tensors
def testNonEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grad_fn = backprop.val_and_grad_function(fn, params=[1])
x = 2.0
y = 3.0
val, grads = val_and_grad_fn(x, y)
self.assertAllClose(val, x * y)
self.assertEqual(1, len(grads))
self.assertAllEqual(grads[0], x)
@test_util.assert_no_new_tensors
def testTensorCopyCPU2GPU2CPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# forward: a (cpu->gpu) -> add (gpu) -> c (gpu->cpu) -> add (cpu) -> e (cpu)
# back: e (cpu) -> add (cpu) -> c (cpu->gpu) -> add (gpu) -> grad (gpu->cpu)
def f(a, b):
with context.device('/gpu:0'):
c = math_ops.add(a.gpu(0), b.gpu(0))
return math_ops.add(c.cpu(), constant_op.constant(3.0))
with context.device('/cpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
def testGetAttrType(self):
typ = backprop.op_attr_type('Add', 'T')
self.assertEqual(typ, pywrap_tensorflow.TF_ATTR_TYPE)
def testGetAttrList(self):
typ = backprop.op_attr_type('MaxPool', 'ksize')
self.assertEqual(typ, [pywrap_tensorflow.TF_ATTR_INT])
def testMakeAttrType(self):
self.assertEqual(dtypes.float32,
backprop.make_attr(pywrap_tensorflow.TF_ATTR_TYPE, 1))
def testMakeAttrTypeList(self):
self.assertEqual([dtypes.float32],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_TYPE], [1]))
def testMulType(self):
def mul(x):
return math_ops._mul_dispatch(x, x) # pylint: disable=protected-access
self.assertAllEqual(
backprop.gradients_function(mul)(3.0)[0].numpy(),
6.0)
def testMakeAttrShape(self):
for s in ([], None, [1, 2, 3], [None, None], [1, None, 3]):
expected = tensor_shape.TensorShape(s).as_proto()
actual = backprop.make_attr(pywrap_tensorflow.TF_ATTR_SHAPE, s)
self.assertEqual(
expected,
actual,
msg=('For shape %r, expected %r != %r actual' % (s, expected,
actual)))
def testMakeAttrShapeList(self):
shape_list = [[], None, [1, 2, 3], [None, None], [1, None, 3]]
self.assertEqual(
[tensor_shape.TensorShape(s).as_proto() for s in shape_list],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_SHAPE], shape_list))
def testArgsGradientFunction(self):
def f(*args):
return args[0] * args[0]
grad = backprop.gradients_function(f)
self.assertAllEqual(grad(1.0)[0], 2.0)
def testPartial(self):
def f(x, y):
return x * y
part = functools.partial(f, constant_op.constant(2.0))
self.assertAllEqual(
backprop.gradients_function(part)(constant_op.constant(1.0))[0],
2.0)
def testReturnSameThing(self):
def f(x):
return x, 2 * x
self.assertAllEqual(backprop.gradients_function(f)(1.0)[0], 3.0)
@test_util.assert_no_new_tensors
def testExceptionSafety(self):
def f(unused_x):
raise ValueError()
try:
backprop.gradients_function(f)(1.0)
except ValueError:
pass
def real_f(x):
return x * x
self.assertAllEqual(backprop.gradients_function(real_f)(1.0)[0], 2.0)
@test_util.assert_no_new_tensors
def testMultiValueConvertToTensor(self):
x = resource_variable_ops.ResourceVariable(
initial_value=array_ops.constant([1.0]), name='x')
def fn():
tape.watch_variable(x)
a = math_ops.add(x.value(), 1.0)
# Make sure convert_to_tensor works correctly with list of TensorNodes.
b = array_ops.stack([a, a], axis=0)
return math_ops.reduce_mean(b)
grad = backprop.implicit_grad(fn)()[0][0]
self.assertAllEqual([1.0], grad)
def testOutput(self):
def multiout(x):
return x + 2, x * x
x = constant_op.constant([0.0, 1.0, 2.0])
grad = backprop.gradients_function(multiout)(x)[0]
self.assertAllEqual([1.0, 3.0, 5.0], grad)
def testMultiValuePreservesIfNotDiffedAgainst(self):
def tfe_conv2d(timage, tkernel, conv2dstrides):
return nn_ops.conv2d(timage, tkernel, conv2dstrides, 'SAME')
i = constant_op.constant([[[[1.0]]]])
k = constant_op.constant([[[[2.0]]]])
s = [1, 1, 1, 1]
grad = backprop.gradients_function(tfe_conv2d, params=(0,))(i, k, s)[0]
self.assertAllEqual([[[[2.0]]]], grad)
def testSameObjectForMultipleArguments(self):
def f(x, y):
return math_ops.multiply(x, y)
g = backprop.gradients_function(f)
def np_g(x, y):
dx, dy = g(x, y)
return [dx.numpy(), dy.numpy()]
x = constant_op.constant(1.)
self.assertAllEqual([1., 1.], np_g(x, x))
x = 1.
self.assertAllEqual([1., 1.], np_g(x, x))
x = constant_op.constant([[1.]])
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
x = [[1.]]
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
v = resource_variable_ops.ResourceVariable(
initial_value=1., name='testSameObjectForMultipleArguments.Variable')
self.assertAllEqual([1., 1.], np_g(v, v))
@test_util.assert_no_new_tensors
def testImplicitGradientsCustomGradientAndCachedVariableValue(self):
@custom_gradient.custom_gradient
def my_square(x):
result = math_ops.square(x)
def grad(dr):
return 2 * dr * x + 1
return result, grad
x = resource_variable_ops.ResourceVariable(
initial_value=3, name='X.' + self.id())
def f():
return my_square(x)
g = backprop.implicit_grad(f)
grads_and_vars = g()
self.assertEqual(1, len(grads_and_vars))
grad, var = grads_and_vars[0]
self.assertAllEqual(7, grad)
self.assertAllEqual(x, var)
@test_util.assert_no_new_tensors
def testCustomGradient(self):
@custom_gradient.custom_gradient
def my_mul(x, y):
result = x*y
def grad(dr):
return [dr*y, dr*x]
return result, grad
lr = 0.25
x = resource_variable_ops.ResourceVariable(2., name='x')
def loss(x):
return my_mul(2., x.read_value())
loss_grads_fn = backprop.implicit_val_and_grad(loss)
losses = []
for _ in range(5):
loss, grads_and_vars = loss_grads_fn(x)
losses.append(loss.numpy())
for (grad, var) in grads_and_vars:
var.assign_sub(lr*grad)
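    # loss = 2 * x has gradient 2 w.r.t. x, so each update subtracts
    # lr * 2 = 0.5 from x and the recorded loss drops by 1.0 per step.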
self.assertAllEqual(losses, [4.0, 3., 2., 1., 0.])
@test_util.assert_no_new_tensors
def testCustomGradientIdentity(self):
@custom_gradient.custom_gradient
def my_identity(x):
def grad(dresult):
return [2 * dresult]
return x, grad
self.assertAllEqual(backprop.gradients_function(my_identity)(1.0)[0], 2.0)
def testDifferentiatingFunctionThatReturnsNone(self):
def fn(x, y):
result = x*y # pylint: disable=unused-variable
x = constant_op.constant(1)
y = constant_op.constant(2)
loss_grads_fn = backprop.implicit_val_and_grad(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
loss_grads_fn(x, y)
val_and_grads_fn = backprop.val_and_grad_function(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
val_and_grads_fn(x, y)
if __name__ == '__main__':
test.main()
|
lyynocs/magento-connector-v8 | refs/heads/master | sale_exceptions/__init__.py | 37 | # -*- coding: utf-8 -*-
#
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Akretion LTDA.
# authors: Raphaël Valyi, Renato Lima
# Copyright (C) 2010-2012 Akretion Sébastien BEAU
# <[email protected]>
# Copyright (C) 2012 Camptocamp SA (Guewen Baconnier)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from . import sale
from . import wizard
|
ff94315/hiwifi-openwrt-HC5661-HC5761 | refs/heads/master | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/test/tf_inherit_check.py | 232 | # Helper script for test_tempfile.py. argv[2] is the number of a file
# descriptor which should _not_ be open. Check this by attempting to
# write to it -- if we succeed, something is wrong.
import sys
import os
verbose = (sys.argv[1] == 'v')
try:
fd = int(sys.argv[2])
try:
os.write(fd, "blat")
except os.error:
# Success -- could not write to fd.
sys.exit(0)
else:
if verbose:
sys.stderr.write("fd %d is open in child" % fd)
sys.exit(1)
except StandardError:
if verbose:
raise
sys.exit(1)
|
drkitty/cyder | refs/heads/master | cyder/cydns/srv/forms.py | 5 | from django import forms
from cyder.cydns.forms import DNSForm
from cyder.cydns.srv.models import SRV
from cyder.base.mixins import UsabilityFormMixin
class SRVForm(DNSForm, UsabilityFormMixin):
class Meta:
model = SRV
exclude = ('fqdn',)
fields = ('label', 'domain', 'target', 'port', 'priority',
'weight', 'views', 'ttl', 'description', 'ctnr')
widgets = {'views': forms.CheckboxSelectMultiple}
class FQDNSRVForm(DNSForm):
class Meta:
model = SRV
exclude = ('label', 'domain')
widgets = {'views': forms.CheckboxSelectMultiple}
|
rockneurotiko/django | refs/heads/master | django/contrib/flatpages/migrations/0001_initial.py | 134 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FlatPage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.CharField(max_length=100, verbose_name='URL', db_index=True)),
('title', models.CharField(max_length=200, verbose_name='title')),
('content', models.TextField(verbose_name='content', blank=True)),
('enable_comments', models.BooleanField(default=False, verbose_name='enable comments')),
('template_name', models.CharField(help_text="Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'.", max_length=70, verbose_name='template name', blank=True)),
('registration_required', models.BooleanField(default=False, help_text='If this is checked, only logged-in users will be able to view the page.', verbose_name='registration required')),
('sites', models.ManyToManyField(to='sites.Site', verbose_name='sites')),
],
options={
'ordering': ('url',),
'db_table': 'django_flatpage',
'verbose_name': 'flat page',
'verbose_name_plural': 'flat pages',
},
bases=(models.Model,),
),
]
|
halober/ovirt-engine | refs/heads/master | packaging/setup/plugins/ovirt-engine-setup/ovirt-engine/db/connection.py | 3 | #
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Connection plugin."""
import gettext
_ = lambda m: gettext.dgettext(message=m, domain='ovirt-engine-setup')
from otopi import constants as otopicons
from otopi import util
from otopi import transaction
from otopi import plugin
from ovirt_engine_setup.engine import constants as oenginecons
from ovirt_engine_setup.engine_common \
import constants as oengcommcons
from ovirt_engine_setup.engine_common import database
@util.export
class Plugin(plugin.PluginBase):
"""Connection plugin."""
class DBTransaction(transaction.TransactionElement):
"""yum transaction element."""
def __init__(self, parent):
self._parent = parent
def __str__(self):
return _("Database Transaction")
def prepare(self):
pass
def abort(self):
connection = self._parent.environment[
oenginecons.EngineDBEnv.CONNECTION
]
if connection is not None:
connection.rollback()
self._parent.environment[
oenginecons.EngineDBEnv.CONNECTION
] = None
def commit(self):
connection = self._parent.environment[
oenginecons.EngineDBEnv.CONNECTION
]
if connection is not None:
connection.commit()
def __init__(self, context):
super(Plugin, self).__init__(context=context)
self._enabled = True
@plugin.event(
stage=plugin.Stages.STAGE_SETUP,
)
def _setup(self):
self.environment[otopicons.CoreEnv.MAIN_TRANSACTION].append(
self.DBTransaction(self)
)
@plugin.event(
stage=plugin.Stages.STAGE_CUSTOMIZATION,
before=(
oengcommcons.Stages.DB_CONNECTION_CUSTOMIZATION,
),
after=(
oengcommcons.Stages.DIALOG_TITLES_S_DATABASE,
),
name=oenginecons.Stages.CONNECTION_ALLOW,
)
def _customization_enable(self):
if not self.environment[oenginecons.CoreEnv.ENABLE]:
self._enabled = False
@plugin.event(
stage=plugin.Stages.STAGE_CUSTOMIZATION,
name=oengcommcons.Stages.DB_CONNECTION_CUSTOMIZATION,
before=(
oengcommcons.Stages.DB_OWNERS_CONNECTIONS_CUSTOMIZED,
),
after=(
oengcommcons.Stages.DIALOG_TITLES_S_DATABASE,
oenginecons.Stages.CONNECTION_ALLOW,
),
condition=lambda self: self._enabled,
)
def _customization(self):
database.OvirtUtils(
plugin=self,
dbenvkeys=oenginecons.Const.ENGINE_DB_ENV_KEYS,
).getCredentials(
name='Engine',
queryprefix='OVESETUP_ENGINE_DB_',
defaultdbenvkeys=oenginecons.Const.DEFAULT_ENGINE_DB_ENV_KEYS,
show_create_msg=True,
)
@plugin.event(
stage=plugin.Stages.STAGE_MISC,
name=oengcommcons.Stages.DB_CONNECTION_AVAILABLE,
after=(
oengcommcons.Stages.DB_SCHEMA,
),
condition=lambda self: self._enabled,
)
def _connection(self):
self.environment[
oenginecons.EngineDBEnv.STATEMENT
] = database.Statement(
dbenvkeys=oenginecons.Const.ENGINE_DB_ENV_KEYS,
environment=self.environment,
)
# must be here as we do not have database at validation
self.environment[
oenginecons.EngineDBEnv.CONNECTION
] = self.environment[oenginecons.EngineDBEnv.STATEMENT].connect()
# vim: expandtab tabstop=4 shiftwidth=4
|
VisheshHanda/production_backup | refs/heads/master | erpnext/stock/doctype/item_variant_attribute/item_variant_attribute.py | 91 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class ItemVariantAttribute(Document):
pass
|
kidaa/encoded | refs/heads/master | src/encoded/commands/jsonld_rdf.py | 1 | """\
Available formats: xml, n3, turtle, nt, pretty-xml, trix.
Example:
%(prog)s "https://www.encodeproject.org/search/?type=organism&frame=object"
"""
EPILOG = __doc__
import rdflib
def run(sources, output, parser='json-ld', serializer='xml', base=None):
g = rdflib.ConjunctiveGraph()
for url in sources:
g.parse(url, format=parser)
g.serialize(output, format=serializer, base=base)
def main():
import argparse
import sys
stdout = sys.stdout
if sys.version_info.major > 2:
stdout = stdout.buffer
rdflib_parsers = sorted(
p.name for p in rdflib.plugin.plugins(kind=rdflib.parser.Parser)
if '/' not in p.name)
rdflib_serializers = sorted(
p.name for p in rdflib.plugin.plugins(kind=rdflib.serializer.Serializer)
if '/' not in p.name)
parser = argparse.ArgumentParser(
description="Convert JSON-LD from source URLs to RDF", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('sources', metavar='URL', nargs='+', help="URLs to convert")
parser.add_argument(
'-p', '--parser', default='json-ld', help=', '.join(rdflib_parsers))
parser.add_argument(
'-s', '--serializer', default='xml', help=', '.join(rdflib_serializers))
parser.add_argument(
'-b', '--base', default=None, help='Base URL')
parser.add_argument(
'-o', '--output', type=argparse.FileType('w'), default=stdout,
help="Output file.")
args = parser.parse_args()
run(args.sources, args.output, args.parser, args.serializer, args.base)
if __name__ == '__main__':
main()
|
Venturi/oldcms | refs/heads/master | env/lib/python2.7/site-packages/PIL/IcoImagePlugin.py | 6 | #
# The Python Imaging Library.
# $Id$
#
# Windows Icon support for PIL
#
# History:
# 96-05-27 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
# <[email protected]>.
# https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin
#
# Icon format references:
# * http://en.wikipedia.org/wiki/ICO_(file_format)
# * http://msdn.microsoft.com/en-us/library/ms997538.aspx
import struct
from io import BytesIO
from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary
from math import log, ceil
__version__ = "0.1"
#
# --------------------------------------------------------------------
i8 = _binary.i8
i16 = _binary.i16le
i32 = _binary.i32le
_MAGIC = b"\0\0\1\0"
def _save(im, fp, filename):
fp.write(_MAGIC) # (2+2)
sizes = im.encoderinfo.get("sizes",
[(16, 16), (24, 24), (32, 32), (48, 48),
(64, 64), (128, 128), (255, 255)])
width, height = im.size
    # filter() does not modify 'sizes' in place, so keep its result.
    sizes = [x for x in sizes if not (x[0] > width or x[1] > height or
                                      x[0] > 255 or x[1] > 255)]
fp.write(struct.pack("<H", len(sizes))) # idCount(2)
offset = fp.tell() + len(sizes)*16
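    # Directory entries are 16 chars each and follow the 6-char header
    # (magic + idCount), so the first image lands at this offset.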
for size in sizes:
width, height = size
fp.write(struct.pack("B", width)) # bWidth(1)
fp.write(struct.pack("B", height)) # bHeight(1)
fp.write(b"\0") # bColorCount(1)
fp.write(b"\0") # bReserved(1)
fp.write(b"\0\0") # wPlanes(2)
fp.write(struct.pack("<H", 32)) # wBitCount(2)
image_io = BytesIO()
tmp = im.copy()
tmp.thumbnail(size, Image.LANCZOS)
tmp.save(image_io, "png")
image_io.seek(0)
image_bytes = image_io.read()
bytes_len = len(image_bytes)
fp.write(struct.pack("<I", bytes_len)) # dwBytesInRes(4)
fp.write(struct.pack("<I", offset)) # dwImageOffset(4)
current = fp.tell()
fp.seek(offset)
fp.write(image_bytes)
offset = offset + bytes_len
fp.seek(current)
def _accept(prefix):
return prefix[:4] == _MAGIC
class IcoFile(object):
def __init__(self, buf):
"""
Parse image from file-like object containing ico file data
"""
# check magic
s = buf.read(6)
if not _accept(s):
raise SyntaxError("not an ICO file")
self.buf = buf
self.entry = []
# Number of items in file
self.nb_items = i16(s[4:])
# Get headers for each item
for i in range(self.nb_items):
s = buf.read(16)
icon_header = {
'width': i8(s[0]),
'height': i8(s[1]),
'nb_color': i8(s[2]), # No. of colors in image (0 if >=8bpp)
'reserved': i8(s[3]),
'planes': i16(s[4:]),
'bpp': i16(s[6:]),
'size': i32(s[8:]),
'offset': i32(s[12:])
}
# See Wikipedia
for j in ('width', 'height'):
if not icon_header[j]:
icon_header[j] = 256
# See Wikipedia notes about color depth.
# We need this just to differ images with equal sizes
icon_header['color_depth'] = (icon_header['bpp'] or
(icon_header['nb_color'] != 0 and
ceil(log(icon_header['nb_color'],
2))) or 256)
icon_header['dim'] = (icon_header['width'], icon_header['height'])
icon_header['square'] = (icon_header['width'] *
icon_header['height'])
self.entry.append(icon_header)
self.entry = sorted(self.entry, key=lambda x: x['color_depth'])
# ICO images are usually squares
# self.entry = sorted(self.entry, key=lambda x: x['width'])
self.entry = sorted(self.entry, key=lambda x: x['square'])
self.entry.reverse()
def sizes(self):
"""
Get a list of all available icon sizes and color depths.
"""
return set((h['width'], h['height']) for h in self.entry)
def getimage(self, size, bpp=False):
"""
Get an image from the icon
"""
for (i, h) in enumerate(self.entry):
if size == h['dim'] and (bpp is False or bpp == h['color_depth']):
return self.frame(i)
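        # No exact match for the requested size/depth: fall back to the
        # first entry, which is the largest image after sorting.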
return self.frame(0)
def frame(self, idx):
"""
Get an image from frame idx
"""
header = self.entry[idx]
self.buf.seek(header['offset'])
data = self.buf.read(8)
self.buf.seek(header['offset'])
if data[:8] == PngImagePlugin._MAGIC:
# png frame
im = PngImagePlugin.PngImageFile(self.buf)
else:
# XOR + AND mask bmp frame
im = BmpImagePlugin.DibImageFile(self.buf)
# change tile dimension to only encompass XOR image
im.size = (im.size[0], int(im.size[1] / 2))
d, e, o, a = im.tile[0]
im.tile[0] = d, (0, 0) + im.size, o, a
# figure out where AND mask image starts
mode = a[0]
bpp = 8
for k in BmpImagePlugin.BIT2MODE.keys():
if mode == BmpImagePlugin.BIT2MODE[k][1]:
bpp = k
break
if 32 == bpp:
# 32-bit color depth icon image allows semitransparent areas
# PIL's DIB format ignores transparency bits, recover them.
# The DIB is packed in BGRX byte order where X is the alpha
# channel.
# Back up to start of bmp data
self.buf.seek(o)
# extract every 4th byte (eg. 3,7,11,15,...)
alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4]
# convert to an 8bpp grayscale image
mask = Image.frombuffer(
'L', # 8bpp
im.size, # (w, h)
alpha_bytes, # source chars
'raw', # raw decoder
('L', 0, -1) # 8bpp inverted, unpadded, reversed
)
else:
# get AND image from end of bitmap
w = im.size[0]
if (w % 32) > 0:
# bitmap row data is aligned to word boundaries
w += 32 - (im.size[0] % 32)
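                # e.g. a 48-pixel-wide icon: 48 % 32 == 16, so rows are
                # padded to w = 64 bits (8 chars per mask row).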
# the total mask data is
# padded row size * height / bits per char
and_mask_offset = o + int(im.size[0] * im.size[1] *
(bpp / 8.0))
total_bytes = int((w * im.size[1]) / 8)
self.buf.seek(and_mask_offset)
maskData = self.buf.read(total_bytes)
# convert raw data to image
mask = Image.frombuffer(
'1', # 1 bpp
im.size, # (w, h)
maskData, # source chars
'raw', # raw decoder
('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed
)
# now we have two images, im is XOR image and mask is AND image
# apply mask image as alpha channel
im = im.convert('RGBA')
im.putalpha(mask)
return im
##
# Image plugin for Windows Icon files.
class IcoImageFile(ImageFile.ImageFile):
"""
PIL read-only image support for Microsoft Windows .ico files.
By default the largest resolution image in the file will be loaded. This
can be changed by altering the 'size' attribute before calling 'load'.
The info dictionary has a key 'sizes' that is a list of the sizes available
in the icon file.
Handles classic, XP and Vista icon formats.
This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
<[email protected]>.
https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin
"""
format = "ICO"
format_description = "Windows Icon"
def _open(self):
self.ico = IcoFile(self.fp)
self.info['sizes'] = self.ico.sizes()
self.size = self.ico.entry[0]['dim']
self.load()
def load(self):
im = self.ico.getimage(self.size)
# if tile is PNG, it won't really be loaded yet
im.load()
self.im = im.im
self.mode = im.mode
self.size = im.size
def load_seek(self):
# Flag the ImageFile.Parser so that it
# just does all the decode at the end.
pass
#
# --------------------------------------------------------------------
Image.register_open(IcoImageFile.format, IcoImageFile, _accept)
Image.register_save(IcoImageFile.format, _save)
Image.register_extension(IcoImageFile.format, ".ico")
|
samchrisinger/osf.io | refs/heads/develop | website/addons/zotero/tests/test_views.py | 22 | # -*- coding: utf-8 -*-
import mock
import urlparse
from website.addons.base.testing import views
from website.addons.base.testing.utils import MockFolder
from website.addons.zotero.model import Zotero
from website.addons.zotero.provider import ZoteroCitationsProvider
from website.addons.zotero.serializer import ZoteroSerializer
from website.addons.zotero.tests.utils import ZoteroTestCase, mock_responses
API_URL = 'https://api.zotero.org'
class TestAuthViews(ZoteroTestCase, views.OAuthAddonAuthViewsTestCaseMixin):
@mock.patch('website.oauth.models.OAuth1Session.fetch_request_token')
def test_oauth_start(self, mock_token):
mock_token.return_value = {
'oauth_token': 'token',
'oauth_secret': 'secret',
}
super(TestAuthViews, self).test_oauth_start()
class TestConfigViews(ZoteroTestCase, views.OAuthCitationAddonConfigViewsTestCaseMixin):
folder = MockFolder()
Serializer = ZoteroSerializer
client = Zotero
citationsProvider = ZoteroCitationsProvider
foldersApiUrl = None
documentsApiUrl = None
mockResponses = mock_responses
def setUp(self):
super(TestConfigViews, self).setUp()
self.foldersApiUrl = urlparse.urljoin(API_URL, 'users/{}/collections'
.format(self.external_account.provider_id))
self.documentsApiUrl = urlparse.urljoin(API_URL, 'users/{}/items'
.format(self.external_account.provider_id))
|
nsoranzo/tools-iuc | refs/heads/master | tools/ncbi_entrez_eutils/eutils.py | 14 | import json
import os
from io import StringIO
from Bio import Entrez
Entrez.tool = "GalaxyEutils_1_0"
BATCH_SIZE = 200
class Client(object):
def __init__(self, history_file=None, user_email=None, admin_email=None):
self.using_history = False
self.using_parsedids = False
if user_email is not None and admin_email is not None:
Entrez.email = ';'.join((admin_email, user_email))
elif user_email is not None:
Entrez.email = user_email
elif admin_email is not None:
Entrez.email = admin_email
else:
Entrez.email = os.environ.get('NCBI_EUTILS_CONTACT', None)
if Entrez.email is None:
raise Exception("Cannot continue without an email; please set "
"administrator email in NCBI_EUTILS_CONTACT")
if history_file is not None:
with open(history_file, 'r') as handle:
data = json.loads(handle.read())
# esearch
if 'QueryKey' in data:
self.query_key = data['QueryKey']
self.webenv = data['WebEnv']
self.query_keys = []
self.query_keys += [data['QueryKey']]
self.using_history = True
elif 'query_key' in data:
self.query_key = data['query_key']
self.webenv = data['WebEnv']
self.query_keys = []
self.query_keys += [data['query_key']]
self.using_history = True
elif 'esearchresult' in data:
self.query_key = data['esearchresult']['querykey']
self.webenv = data['esearchresult']['webenv']
self.query_keys = []
self.query_keys += [data['esearchresult']['querykey']]
self.using_history = True
# elink
elif 'linksets' in data:
# elink for cmd=neighbor_history
if 'linksetdbhistories' in data['linksets'][0]:
self.webenv = data['linksets'][0]['webenv']
self.query_key = data['linksets'][0]['linksetdbhistories'][0]['querykey']
self.using_history = True
# elink for cmd=neighbor|neighbor_score
elif 'linksetdbs' in data['linksets'][0]:
self.using_parsedids = True
# elink for neighbor
if isinstance(data['linksets'][0]['linksetdbs'][0]['links'][0], str):
self.idstr = ','.join(data['linksets'][0]['linksetdbs'][0]['links'])
# elink for neighbor_score
else:
self.idstr = ','.join(map(lambda x: x['id'], data['linksets'][0]['linksetdbs'][0]['links']))
if 'linksetdbhistories' in data['linksets'][0]:
self.webenv = data['linksets'][0]['webenv']
self.query_keys = []
for query in data['linksets'][0]['linksetdbhistories']:
if 'querykey' in query:
self.query_keys += [query['querykey']]
else:
print("No match")
print(data)
def get_history(self):
if self.using_history:
return {
'query_key': self.query_key,
'WebEnv': self.webenv,
}
elif self.using_parsedids:
return {
'id': self.idstr,
}
else:
return {}
def get_histories(self):
histories = []
for key in self.query_keys:
histories += [{'WebEnv': self.webenv, 'query_key': key}]
return histories
def post(self, database, **payload):
return json.dumps(Entrez.read(Entrez.epost(database, **payload)), indent=4)
def fetch(self, db, ftype=None, **payload):
os.makedirs("downloads")
if 'id' in payload:
summary = self.id_summary(db, payload['id'])
elif 'WebEnv' not in payload or 'query_key' not in payload:
summary = self.history_summary(db)
else:
summary = payload
count = len(summary)
payload['retmax'] = BATCH_SIZE
        # Page through the results in BATCH_SIZE chunks; this may be
        # inefficient for very large result sets, but it keeps each
        # individual request small.
for i in range(0, count, BATCH_SIZE):
payload['retstart'] = i
file_path = os.path.join('downloads', 'EFetch Results Chunk %s.%s' % (i, ftype))
with open(file_path, 'w') as handle:
handle.write(Entrez.efetch(db, **payload).read())
def id_summary(self, db, id_list):
payload = {
'db': db,
'id': id_list,
}
return Entrez.read(Entrez.esummary(**payload))
def history_summary(self, db):
if not self.using_history:
raise Exception("History must be available for this method")
payload = {
'db': db,
'query_key': self.query_key,
'WebEnv': self.webenv,
}
return Entrez.read(Entrez.esummary(**payload))
def summary(self, **payload):
return Entrez.esummary(**payload).read()
def link(self, **payload):
return Entrez.elink(**payload).read()
def extract_history_from_xml_file(self, xml_file):
history = {}
with open(xml_file, 'r') as handle:
xml_str = handle.read()
history = self.extract_history_from_xml(xml_str)
return history
def extract_history_from_xml(self, xml_str):
try:
parsed_data = Entrez.read(StringIO(xml_str))
history = {}
gotit = 0
            # New code doesn't work for esearch input to elink: parsing
            # esearch output (reading an XML history) does not work as an
            # elink input payload, which needs 'QueryKey'. Notably, when
            # parsing elink output as input to elink, the XML 'QueryKey'
            # must be converted to 'query_key'. Also notably, efetch
            # returns results using the 'QueryKey' key.
# For esearch xml history results
if 'QueryKey' in parsed_data:
history['query_key'] = parsed_data['QueryKey']
gotit += 1
if 'WebEnv' in parsed_data:
history['WebEnv'] = parsed_data['WebEnv']
gotit += 1
# For elink xml history results
if gotit < 2:
if 'LinkSetDbHistory' in parsed_data[0]:
if 'QueryKey' in parsed_data[0]['LinkSetDbHistory'][0]:
history['query_key'] = parsed_data[0]['LinkSetDbHistory'][0]['QueryKey']
gotit += 1
if 'WebEnv' in parsed_data[0]:
history['WebEnv'] = parsed_data[0]['WebEnv']
gotit += 1
if gotit < 2:
raise Exception("Could not find WebEnv in xml response")
except Exception as e:
print("Error parsing...")
print(xml_str)
raise(e)
return history
def extract_histories_from_xml_file(self, xml_file):
histories = []
with open(xml_file, 'r') as handle:
xml_str = handle.read()
histories = self.extract_histories_from_xml(xml_str)
return histories
def extract_histories_from_xml(self, xml_str):
try:
parsed_data = Entrez.read(StringIO(xml_str))
histories = []
gotit = 0
            # New code doesn't work for esearch input to elink: parsing
            # esearch output (reading an XML history) does not work as an
            # elink input payload, which needs 'QueryKey'. Notably, when
            # parsing elink output as input to elink, the XML 'QueryKey'
            # must be converted to 'query_key'. Also notably, efetch
            # returns results using the 'QueryKey' key.
# For esearch xml history results
if 'QueryKey' in parsed_data:
tmp_hist = {}
tmp_hist['query_key'] = parsed_data['QueryKey']
gotit += 1
if 'WebEnv' in parsed_data:
tmp_hist['WebEnv'] = parsed_data['WebEnv']
gotit += 1
if gotit == 2:
histories += [tmp_hist]
# For elink xml history results
else:
gotenv = 0
if 'LinkSetDbHistory' in parsed_data[0]:
for query in parsed_data[0]['LinkSetDbHistory']:
tmp_hist = {}
if 'WebEnv' in parsed_data[0]:
tmp_hist['WebEnv'] = parsed_data[0]['WebEnv']
if 'QueryKey' in query:
tmp_hist['query_key'] = query['QueryKey']
histories += [tmp_hist]
gotit += 1
if gotit == 0 and gotenv == 0:
raise Exception("Could not find WebEnv in xml response")
except Exception as e:
print("Error parsing...")
print(xml_str)
raise(e)
return histories
def search(self, **payload):
return Entrez.esearch(**payload).read()
def info(self, **kwargs):
return Entrez.einfo(**kwargs).read()
def gquery(self, **kwargs):
return Entrez.egquery(**kwargs).read()
def citmatch(self, **kwargs):
return Entrez.ecitmatch(**kwargs).read()
@classmethod
def jsonstring2jsondata(cls, json_str):
json_handle = StringIO(json_str)
json_data = json.loads(json_handle.read())
return json_data
@classmethod
def jsonfile2UIlist(cls, json_file):
merged_ids = []
with open(json_file, 'r') as handle:
json_data = json.loads(handle.read())
for id in cls.jsondata2UIlist(json_data):
merged_ids += [id]
return merged_ids
@classmethod
def jsondata2UIlist(cls, json_data):
merged_ids = []
# Always prioritize the result links as opposed to the search links
# elink - retrieves linked IDs for cmd=neighbor|neighbor_score only
if 'linksets' in json_data:
for lnk in json_data['linksets'][0]['linksetdbs']:
if 'links' in lnk:
for id in lnk['links']:
# elink for neighbor
if isinstance(id, str):
merged_ids.append(id)
# elink for neighbor_score
else:
merged_ids.append(id['id'])
# esearch
elif 'esearchresult' in json_data:
for id in json_data['esearchresult']['idlist']:
merged_ids += [id]
return merged_ids
@classmethod
def xmlfile2UIlist(cls, xml_file):
merged_ids = []
with open(xml_file, 'r') as handle:
xml_data = Entrez.read(handle)
for id in cls.xmldata2UIlist(xml_data):
merged_ids += [id]
return merged_ids
@classmethod
def xmlstring2UIlist(cls, xml_str):
merged_ids = []
xml_data = Entrez.read(StringIO(xml_str))
for id in cls.xmldata2UIlist(xml_data):
merged_ids += [id]
return merged_ids
@classmethod
def xmldata2UIlist(cls, xml_data):
merged_ids = []
try:
# Always prioritize the result links as opposed to the search links
# elink - retrieves linked IDs for cmd=neighbor|neighbor_score only
if 'LinkSetDb' in xml_data[0]:
for lnk in xml_data[0]['LinkSetDb'][0]['Link']:
# elink for neighbor
if isinstance(lnk, str):
merged_ids.append(lnk)
# elink for neighbor_score
else:
merged_ids.append(lnk['Id'])
# esearch
elif 'IdList' in xml_data:
for id in xml_data['IdList']:
merged_ids += [id]
# If it was not elink output, we will end up here
except Exception:
# esearch
if 'IdList' in xml_data:
for id in xml_data['IdList']:
merged_ids += [id]
return merged_ids
@classmethod
def parse_ids(cls, id_list, id, history_file, xml_file, json_file):
"""Parse IDs passed on --cli or in a file passed to the cli
"""
merged_ids = []
if id is not None:
for pid in id.replace('__cn__', ',').replace('\n', ',').split(','):
if pid is not None and len(pid) > 0:
merged_ids.append(pid)
if id_list is not None:
with open(id_list, 'r') as handle:
merged_ids += [x.strip() for x in handle.readlines()]
if xml_file is not None:
tmp_ids = cls.xmlfile2UIlist(xml_file)
for id in tmp_ids:
merged_ids += [id]
if json_file is not None:
tmp_ids = cls.jsonfile2UIlist(json_file)
for id in tmp_ids:
merged_ids += [id]
return merged_ids
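    # Illustrative (hypothetical) call, merging command-line IDs with a
    # one-ID-per-line file:
    #   merged = Client.parse_ids('ids.txt', '123,456', None, None, None)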
@classmethod
def getVersion(cls):
"""Return the biopython version
"""
import Bio
return Bio.__version__
|
GRArmstrong/invenio-inspire-ops | refs/heads/prod | modules/websubmit/lib/functions/Print_Success_Approval_Request.py | 39 | ## This file is part of Invenio.
## Copyright (C) 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""This is Print_Success_Approval_Request. It creates a "success
message" that is shown to the user to indicate that their approval
request has successfully been registered.
"""
__revision__ = "$Id$"
def Print_Success_Approval_Request(parameters, curdir, form, user_info=None):
"""
This function creates a "success message" that is to be shown to the
user to indicate that their approval request has successfully been
registered.
@parameters: None.
@return: (string) - the "success" message for the user.
"""
text = """<br />
<div>
The approval request for your document has successfully been
registered and the referee has been informed.<br />
You will be notified by email when a decision has been made.
</div>
<br />"""
return text
|
m4ns0ur/grumpy | refs/heads/master | lib/itertools_test.py | 7 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import weetest
def TestCycle():
want = []
got = []
for x in itertools.cycle(()):
got.append(x)
assert got == want, 'empty cycle yields no elements'
arg = (0, 1, 2)
want = (0, 1, 2) * 10
got = []
limit = 10 * len(arg)
counter = 0
for x in itertools.cycle((0, 1, 2)):
got.append(x)
counter += 1
if counter == limit:
break
assert tuple(got) == want, 'tuple(cycle%s) == %s, want %s' % (arg, tuple(got), want)
def TestDropwhile():
r = range(10)
cases = [
((lambda x: x < 5, r), (5, 6, 7, 8, 9)),
((lambda x: True, r), ()),
((lambda x: False, r), tuple(r)),
]
for args, want in cases:
got = tuple(itertools.dropwhile(*args))
assert got == want, 'tuple(dropwhile%s) == %s, want %s' % (args, got, want)
def TestChain():
r = range(10)
cases = [
([r], tuple(r)),
([r, r], tuple(r) + tuple(r)),
([], ())
]
for args, want in cases:
got = tuple(itertools.chain(*args))
assert got == want, 'tuple(chain%s) == %s, want %s' % (args, got, want)
def TestFromIterable():
r = range(10)
cases = [
([r], tuple(r)),
([r, r], tuple(r) + tuple(r)),
([], ())
]
for args, want in cases:
got = tuple(itertools.chain.from_iterable(args))
assert got == want, 'tuple(from_iterable%s) == %s, want %s' % (args, got, want)
def TestIFilter():
r = range(10)
cases = [
((lambda x: x < 5, r), (0, 1, 2, 3, 4)),
((lambda x: False, r), ()),
((lambda x: True, r), tuple(r)),
((None, r), (1, 2, 3, 4, 5, 6, 7, 8, 9))
]
for args, want in cases:
got = tuple(itertools.ifilter(*args))
assert got == want, 'tuple(ifilter%s) == %s, want %s' % (args, got, want)
def TestIFilterFalse():
r = range(10)
cases = [
((lambda x: x < 5, r), (5, 6, 7, 8, 9)),
((lambda x: False, r), tuple(r)),
((lambda x: True, r), ()),
((None, r), (0,))
]
for args, want in cases:
got = tuple(itertools.ifilterfalse(*args))
assert got == want, 'tuple(ifilterfalse%s) == %s, want %s' % (args, got, want)
def TestISlice():
r = range(10)
cases = [
((r, 5), (0, 1, 2, 3, 4)),
((r, 25, 30), ()),
((r, 1, None, 3), (1, 4, 7)),
]
for args, want in cases:
got = tuple(itertools.islice(*args))
assert got == want, 'tuple(islice%s) == %s, want %s' % (args, got, want)
def TestIZipLongest():
cases = [
(('abc', range(6)), (('a', 0), ('b', 1), ('c', 2), (None, 3), (None, 4), (None, 5))),
((range(6), 'abc'), ((0, 'a'), (1, 'b'), (2, 'c'), (3, None), (4, None), (5, None))),
(([1, None, 3], 'ab', range(1)), ((1, 'a', 0), (None, 'b', None), (3, None, None))),
]
for args, want in cases:
got = tuple(itertools.izip_longest(*args))
assert got == want, 'tuple(izip_longest%s) == %s, want %s' % (args, got, want)
def TestProduct():
cases = [
(([1, 2], ['a', 'b']), ((1, 'a'), (1, 'b'), (2, 'a'), (2, 'b'))),
(([1], ['a', 'b']), ((1, 'a'), (1, 'b'))),
(([],), ()),
]
for args, want in cases:
got = tuple(itertools.product(*args))
assert got == want, 'tuple(product%s) == %s, want %s' % (args, got, want)
def TestPermutations():
cases = [
(('AB',), (('A', 'B'), ('B', 'A'))),
(('ABC', 2), (('A', 'B'), ('A', 'C'), ('B', 'A'), ('B', 'C'), ('C', 'A'), ('C', 'B'))),
((range(3),), ((0, 1, 2), (0, 2, 1), (1, 0, 2), (1, 2, 0), (2, 0, 1), (2, 1, 0))),
(([],), ((),)),
(([], 0), ((),)),
((range(3), 4), ()),
]
for args, want in cases:
got = tuple(itertools.permutations(*args))
assert got == want, 'tuple(permutations%s) == %s, want %s' % (args, got, want)
def TestCombinations():
cases = [
((range(4), 3), ((0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3))),
]
for args, want in cases:
got = tuple(itertools.combinations(*args))
assert got == want, 'tuple(combinations%s) == %s, want %s' % (args, got, want)
def TestCombinationsWithReplacement():
cases = [
(([-12], 2), (((-12, -12),))),
(('AB', 3), (('A', 'A', 'A'), ('A', 'A', 'B'), ('A', 'B', 'B'), ('B', 'B', 'B'))),
(([], 2), ()),
(([], 0), ((),))
]
for args, want in cases:
got = tuple(itertools.combinations_with_replacement(*args))
assert got == want, 'tuple(combinations_with_replacement%s) == %s, want %s' % (args, got, want)
def TestGroupBy():
cases = [
(([1, 2, 2, 3, 3, 3, 4, 4, 4, 4],), [(1, [1]), (2, [2, 2]), (3, [3, 3, 3]), (4, [4, 4, 4, 4])]),
((['aa', 'ab', 'abc', 'bcd', 'abcde'], len), [(2, ['aa', 'ab']), (3, ['abc', 'bcd']), (5, ['abcde'])]),
]
for args, want in cases:
got = [(k, list(v)) for k, v in itertools.groupby(*args)]
assert got == want, 'groupby %s == %s, want %s' % (args, got, want)
def TestTakewhile():
r = range(10)
cases = [
((lambda x: x % 2 == 0, r), (0,)),
((lambda x: True, r), tuple(r)),
((lambda x: False, r), ())
]
for args, want in cases:
got = tuple(itertools.takewhile(*args))
assert got == want, 'tuple(takewhile%s) == %s, want %s' % (args, got, want)
if __name__ == '__main__':
weetest.RunTests()
|
bootswithdefer/jenkins-job-builder | refs/heads/github | tests/macros/test_macros.py | 20 | # Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testtools import TestCase
from testscenarios.testcase import TestWithScenarios
from tests.base import get_scenarios, SingleJobTestCase
class TestCaseModuleSCMMacro(TestWithScenarios, TestCase, SingleJobTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = get_scenarios(fixtures_path)
|
olivierverdier/SpecTraVVave | refs/heads/master | travwave/equations/benjamin.py | 1 | from __future__ import division
from .base import Equation
import numpy as np
class Benjamin_Ono(Equation):
"""
    The equation is: -c*u + u + 1/2*u^2 + H(u_x) = 0
"""
def degree(self):
return 2
def compute_kernel(self, k):
return 1.0 - np.abs(k)
def flux(self, u):
return 0.5*u*u
def flux_prime(self, u):
return u
class modified_Benjamin_Ono(Benjamin_Ono):
"""
    The equation is: -c*u + u + 1/3*u^3 + H(u_x) = 0
"""
def degree(self):
return 3
def flux(self, u):
return 1/3*u*u*u
def flux_prime(self, u):
return u*u
|
LockScreen/Backend | refs/heads/master | venv/lib/python2.7/site-packages/boto/emr/connection.py | 80 | # Copyright (c) 2010 Spotify AB
# Copyright (c) 2010-2011 Yelp
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a connection to the EMR service
"""
import types
import boto
import boto.utils
from boto.ec2.regioninfo import RegionInfo
from boto.emr.emrobject import AddInstanceGroupsResponse, BootstrapActionList, \
Cluster, ClusterSummaryList, HadoopStep, \
InstanceGroupList, InstanceList, JobFlow, \
JobFlowStepList, \
ModifyInstanceGroupsResponse, \
RunJobFlowResponse, StepSummaryList
from boto.emr.step import JarStep
from boto.connection import AWSQueryConnection
from boto.exception import EmrResponseError
from boto.compat import six
class EmrConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'emr_version', '2009-03-31')
DefaultRegionName = boto.config.get('Boto', 'emr_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'emr_region_endpoint',
'elasticmapreduce.us-east-1.amazonaws.com')
ResponseError = EmrResponseError
# Constants for AWS Console debugging
DebuggingJar = 's3n://us-east-1.elasticmapreduce/libs/script-runner/script-runner.jar'
DebuggingArgs = 's3n://us-east-1.elasticmapreduce/libs/state-pusher/0.1/fetch'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
security_token=None, validate_certs=True, profile_name=None):
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
self.region = region
super(EmrConnection, self).__init__(aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path,
security_token,
validate_certs=validate_certs,
profile_name=profile_name)
# Many of the EMR hostnames are of the form:
# <region>.<service_name>.amazonaws.com
# rather than the more common:
# <service_name>.<region>.amazonaws.com
# so we need to explicitly set the region_name and service_name
# for the SigV4 signing.
self.auth_region_name = self.region.name
self.auth_service_name = 'elasticmapreduce'
def _required_auth_capability(self):
return ['hmac-v4']
def describe_cluster(self, cluster_id):
"""
Describes an Elastic MapReduce cluster
:type cluster_id: str
:param cluster_id: The cluster id of interest
"""
params = {
'ClusterId': cluster_id
}
return self.get_object('DescribeCluster', params, Cluster)
def describe_jobflow(self, jobflow_id):
"""
Describes a single Elastic MapReduce job flow
:type jobflow_id: str
:param jobflow_id: The job flow id of interest
"""
jobflows = self.describe_jobflows(jobflow_ids=[jobflow_id])
if jobflows:
return jobflows[0]
def describe_jobflows(self, states=None, jobflow_ids=None,
created_after=None, created_before=None):
"""
Retrieve all the Elastic MapReduce job flows on your account
:type states: list
:param states: A list of strings with job flow states wanted
:type jobflow_ids: list
:param jobflow_ids: A list of job flow IDs
:type created_after: datetime
:param created_after: Bound on job flow creation time
:type created_before: datetime
:param created_before: Bound on job flow creation time
"""
params = {}
if states:
self.build_list_params(params, states, 'JobFlowStates.member')
if jobflow_ids:
self.build_list_params(params, jobflow_ids, 'JobFlowIds.member')
if created_after:
params['CreatedAfter'] = created_after.strftime(
boto.utils.ISO8601)
if created_before:
params['CreatedBefore'] = created_before.strftime(
boto.utils.ISO8601)
return self.get_list('DescribeJobFlows', params, [('member', JobFlow)])
def describe_step(self, cluster_id, step_id):
"""
Describe an Elastic MapReduce step
:type cluster_id: str
:param cluster_id: The cluster id of interest
:type step_id: str
:param step_id: The step id of interest
"""
params = {
'ClusterId': cluster_id,
'StepId': step_id
}
return self.get_object('DescribeStep', params, HadoopStep)
def list_bootstrap_actions(self, cluster_id, marker=None):
"""
Get a list of bootstrap actions for an Elastic MapReduce cluster
:type cluster_id: str
:param cluster_id: The cluster id of interest
:type marker: str
:param marker: Pagination marker
"""
params = {
'ClusterId': cluster_id
}
if marker:
params['Marker'] = marker
return self.get_object('ListBootstrapActions', params, BootstrapActionList)
def list_clusters(self, created_after=None, created_before=None,
cluster_states=None, marker=None):
"""
List Elastic MapReduce clusters with optional filtering
:type created_after: datetime
:param created_after: Bound on cluster creation time
:type created_before: datetime
:param created_before: Bound on cluster creation time
:type cluster_states: list
:param cluster_states: Bound on cluster states
:type marker: str
:param marker: Pagination marker
"""
params = {}
if created_after:
params['CreatedAfter'] = created_after.strftime(
boto.utils.ISO8601)
if created_before:
params['CreatedBefore'] = created_before.strftime(
boto.utils.ISO8601)
if marker:
params['Marker'] = marker
if cluster_states:
self.build_list_params(params, cluster_states, 'ClusterStates.member')
return self.get_object('ListClusters', params, ClusterSummaryList)
def list_instance_groups(self, cluster_id, marker=None):
"""
List EC2 instance groups in a cluster
:type cluster_id: str
:param cluster_id: The cluster id of interest
:type marker: str
:param marker: Pagination marker
"""
params = {
'ClusterId': cluster_id
}
if marker:
params['Marker'] = marker
return self.get_object('ListInstanceGroups', params, InstanceGroupList)
def list_instances(self, cluster_id, instance_group_id=None,
instance_group_types=None, marker=None):
"""
List EC2 instances in a cluster
:type cluster_id: str
:param cluster_id: The cluster id of interest
:type instance_group_id: str
:param instance_group_id: The EC2 instance group id of interest
:type instance_group_types: list
:param instance_group_types: Filter by EC2 instance group type
:type marker: str
:param marker: Pagination marker
"""
params = {
'ClusterId': cluster_id
}
if instance_group_id:
params['InstanceGroupId'] = instance_group_id
if marker:
params['Marker'] = marker
if instance_group_types:
self.build_list_params(params, instance_group_types,
'InstanceGroupTypeList.member')
return self.get_object('ListInstances', params, InstanceList)
def list_steps(self, cluster_id, step_states=None, marker=None):
"""
List cluster steps
:type cluster_id: str
:param cluster_id: The cluster id of interest
:type step_states: list
:param step_states: Filter by step states
:type marker: str
:param marker: Pagination marker
"""
params = {
'ClusterId': cluster_id
}
if marker:
params['Marker'] = marker
if step_states:
self.build_list_params(params, step_states, 'StepStateList.member')
return self.get_object('ListSteps', params, StepSummaryList)
def add_tags(self, resource_id, tags):
"""
Create new metadata tags for the specified resource id.
:type resource_id: str
:param resource_id: The cluster id
:type tags: dict
:param tags: A dictionary containing the name/value pairs.
If you want to create only a tag name, the
value for that tag should be the empty string
(e.g. '') or None.
"""
assert isinstance(resource_id, six.string_types)
params = {
'ResourceId': resource_id,
}
params.update(self._build_tag_list(tags))
return self.get_status('AddTags', params, verb='POST')
def remove_tags(self, resource_id, tags):
"""
Remove metadata tags for the specified resource id.
:type resource_id: str
:param resource_id: The cluster id
:type tags: list
:param tags: A list of tag names to remove.
"""
params = {
'ResourceId': resource_id,
}
params.update(self._build_string_list('TagKeys', tags))
return self.get_status('RemoveTags', params, verb='POST')
def terminate_jobflow(self, jobflow_id):
"""
Terminate an Elastic MapReduce job flow
:type jobflow_id: str
:param jobflow_id: A jobflow id
"""
self.terminate_jobflows([jobflow_id])
def terminate_jobflows(self, jobflow_ids):
"""
Terminate an Elastic MapReduce job flow
:type jobflow_ids: list
:param jobflow_ids: A list of job flow IDs
"""
params = {}
self.build_list_params(params, jobflow_ids, 'JobFlowIds.member')
return self.get_status('TerminateJobFlows', params, verb='POST')
def add_jobflow_steps(self, jobflow_id, steps):
"""
Adds steps to a jobflow
:type jobflow_id: str
:param jobflow_id: The job flow id
:type steps: list(boto.emr.Step)
:param steps: A list of steps to add to the job
"""
if not isinstance(steps, list):
steps = [steps]
params = {}
params['JobFlowId'] = jobflow_id
# Step args
step_args = [self._build_step_args(step) for step in steps]
params.update(self._build_step_list(step_args))
return self.get_object(
'AddJobFlowSteps', params, JobFlowStepList, verb='POST')
def add_instance_groups(self, jobflow_id, instance_groups):
"""
Adds instance groups to a running cluster.
:type jobflow_id: str
:param jobflow_id: The id of the jobflow which will take the
new instance groups
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: A list of instance groups to add to the job
"""
if not isinstance(instance_groups, list):
instance_groups = [instance_groups]
params = {}
params['JobFlowId'] = jobflow_id
params.update(self._build_instance_group_list_args(instance_groups))
return self.get_object('AddInstanceGroups', params,
AddInstanceGroupsResponse, verb='POST')
def modify_instance_groups(self, instance_group_ids, new_sizes):
"""
Modify the number of nodes and configuration settings in an
instance group.
:type instance_group_ids: list(str)
:param instance_group_ids: A list of the ID's of the instance
groups to be modified
:type new_sizes: list(int)
:param new_sizes: A list of the new sizes for each instance group
"""
if not isinstance(instance_group_ids, list):
instance_group_ids = [instance_group_ids]
if not isinstance(new_sizes, list):
new_sizes = [new_sizes]
instance_groups = zip(instance_group_ids, new_sizes)
params = {}
for k, ig in enumerate(instance_groups):
# could be wrong - the example amazon gives uses
# InstanceRequestCount, while the api documentation
# says InstanceCount
params['InstanceGroups.member.%d.InstanceGroupId' % (k+1) ] = ig[0]
params['InstanceGroups.member.%d.InstanceCount' % (k+1) ] = ig[1]
return self.get_object('ModifyInstanceGroups', params,
ModifyInstanceGroupsResponse, verb='POST')
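    # Example with hypothetical group ids, resizing two groups at once:
    #   conn.modify_instance_groups(['ig-AAAA', 'ig-BBBB'], [5, 10])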
def run_jobflow(self, name, log_uri=None, ec2_keyname=None,
availability_zone=None,
master_instance_type='m1.small',
slave_instance_type='m1.small', num_instances=1,
action_on_failure='TERMINATE_JOB_FLOW', keep_alive=False,
enable_debugging=False,
hadoop_version=None,
steps=[],
bootstrap_actions=[],
instance_groups=None,
additional_info=None,
ami_version=None,
api_params=None,
visible_to_all_users=None,
job_flow_role=None,
service_role=None):
"""
Runs a job flow
:type name: str
:param name: Name of the job flow
:type log_uri: str
:param log_uri: URI of the S3 bucket to place logs
:type ec2_keyname: str
:param ec2_keyname: EC2 key used for the instances
:type availability_zone: str
:param availability_zone: EC2 availability zone of the cluster
:type master_instance_type: str
:param master_instance_type: EC2 instance type of the master
:type slave_instance_type: str
:param slave_instance_type: EC2 instance type of the slave nodes
:type num_instances: int
:param num_instances: Number of instances in the Hadoop cluster
:type action_on_failure: str
:param action_on_failure: Action to take if a step terminates
:type keep_alive: bool
:param keep_alive: Denotes whether the cluster should stay
alive upon completion
:type enable_debugging: bool
:param enable_debugging: Denotes whether AWS console debugging
should be enabled.
:type hadoop_version: str
:param hadoop_version: Version of Hadoop to use. This no longer
defaults to '0.20' and now uses the AMI default.
:type steps: list(boto.emr.Step)
:param steps: List of steps to add with the job
:type bootstrap_actions: list(boto.emr.BootstrapAction)
:param bootstrap_actions: List of bootstrap actions that run
before Hadoop starts.
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: Optional list of instance groups to
use when creating this job.
NB: When provided, this argument supersedes num_instances
and master/slave_instance_type.
:type ami_version: str
:param ami_version: Amazon Machine Image (AMI) version to use
for instances. Values accepted by EMR are '1.0', '2.0', and
'latest'; EMR currently defaults to '1.0' if you don't set
'ami_version'.
:type additional_info: JSON str
:param additional_info: A JSON string for selecting additional features
:type api_params: dict
:param api_params: a dictionary of additional parameters to pass
directly to the EMR API (so you don't have to upgrade boto to
use new EMR features). You can also delete an API parameter
by setting it to None.
:type visible_to_all_users: bool
:param visible_to_all_users: Whether the job flow is visible to all IAM
users of the AWS account associated with the job flow. If this
value is set to ``True``, all IAM users of that AWS
account can view and (if they have the proper policy permissions
set) manage the job flow. If it is set to ``False``, only
the IAM user that created the job flow can view and manage
it.
:type job_flow_role: str
:param job_flow_role: An IAM role for the job flow. The EC2
instances of the job flow assume this role. The default role is
``EMRJobflowDefault``. In order to use the default role,
you must have already created it using the CLI.
:type service_role: str
:param service_role: The IAM role that will be assumed by the Amazon
EMR service to access AWS resources on your behalf.
:rtype: str
:return: The jobflow id
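        Example (a sketch; ``conn``, the bucket and the step object are
        placeholders):
            jobflow_id = conn.run_jobflow(name='My analysis',
                                          log_uri='s3://my-bucket/logs',
                                          steps=[my_step])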
"""
        # Copy the step and bootstrap action lists so the (formerly mutable
        # default) arguments are never mutated across calls.
        steps = list(steps) if steps else []
        bootstrap_actions = list(bootstrap_actions) if bootstrap_actions else []
        params = {}
if action_on_failure:
params['ActionOnFailure'] = action_on_failure
if log_uri:
params['LogUri'] = log_uri
params['Name'] = name
# Common instance args
common_params = self._build_instance_common_args(ec2_keyname,
availability_zone,
keep_alive,
hadoop_version)
params.update(common_params)
# NB: according to the AWS API's error message, we must
# "configure instances either using instance count, master and
# slave instance type or instance groups but not both."
#
# Thus we switch here on the truthiness of instance_groups.
if not instance_groups:
# Instance args (the common case)
instance_params = self._build_instance_count_and_type_args(
master_instance_type,
slave_instance_type,
num_instances)
params.update(instance_params)
else:
            # Instance group args (for spot instances or a heterogeneous cluster)
list_args = self._build_instance_group_list_args(instance_groups)
instance_params = dict(
('Instances.%s' % k, v) for k, v in six.iteritems(list_args)
)
params.update(instance_params)
# Debugging step from EMR API docs
if enable_debugging:
debugging_step = JarStep(name='Setup Hadoop Debugging',
action_on_failure='TERMINATE_JOB_FLOW',
main_class=None,
jar=self.DebuggingJar,
step_args=self.DebuggingArgs)
steps.insert(0, debugging_step)
# Step args
if steps:
step_args = [self._build_step_args(step) for step in steps]
params.update(self._build_step_list(step_args))
if bootstrap_actions:
            bootstrap_action_args = [
                self._build_bootstrap_action_args(bootstrap_action)
                for bootstrap_action in bootstrap_actions]
params.update(self._build_bootstrap_action_list(bootstrap_action_args))
if ami_version:
params['AmiVersion'] = ami_version
if additional_info is not None:
params['AdditionalInfo'] = additional_info
if api_params:
for key, value in six.iteritems(api_params):
if value is None:
params.pop(key, None)
else:
params[key] = value
if visible_to_all_users is not None:
if visible_to_all_users:
params['VisibleToAllUsers'] = 'true'
else:
params['VisibleToAllUsers'] = 'false'
if job_flow_role is not None:
params['JobFlowRole'] = job_flow_role
if service_role is not None:
params['ServiceRole'] = service_role
response = self.get_object(
'RunJobFlow', params, RunJobFlowResponse, verb='POST')
return response.jobflowid
def set_termination_protection(self, jobflow_id,
termination_protection_status):
"""
        Set termination protection on the specified Elastic MapReduce job flow
        :type jobflow_id: str
        :param jobflow_id: The ID of the job flow to modify
:type termination_protection_status: bool
:param termination_protection_status: Termination protection status
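        Example (a sketch; the job flow ID is a placeholder):
            conn.set_termination_protection('j-XXXXXXXX', True)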
"""
assert termination_protection_status in (True, False)
params = {}
params['TerminationProtected'] = (termination_protection_status and "true") or "false"
self.build_list_params(params, [jobflow_id], 'JobFlowIds.member')
return self.get_status('SetTerminationProtection', params, verb='POST')
def set_visible_to_all_users(self, jobflow_id, visibility):
"""
        Set whether the specified Elastic MapReduce job flow is visible to all IAM users
        :type jobflow_id: str
        :param jobflow_id: The ID of the job flow to modify
:type visibility: bool
:param visibility: Visibility
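        Example (a sketch; the job flow ID is a placeholder):
            conn.set_visible_to_all_users('j-XXXXXXXX', True)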
"""
assert visibility in (True, False)
params = {}
params['VisibleToAllUsers'] = (visibility and "true") or "false"
self.build_list_params(params, [jobflow_id], 'JobFlowIds.member')
return self.get_status('SetVisibleToAllUsers', params, verb='POST')
def _build_bootstrap_action_args(self, bootstrap_action):
bootstrap_action_params = {}
bootstrap_action_params['ScriptBootstrapAction.Path'] = bootstrap_action.path
try:
bootstrap_action_params['Name'] = bootstrap_action.name
except AttributeError:
pass
args = bootstrap_action.args()
if args:
self.build_list_params(bootstrap_action_params, args, 'ScriptBootstrapAction.Args.member')
return bootstrap_action_params
def _build_step_args(self, step):
step_params = {}
step_params['ActionOnFailure'] = step.action_on_failure
step_params['HadoopJarStep.Jar'] = step.jar()
main_class = step.main_class()
if main_class:
step_params['HadoopJarStep.MainClass'] = main_class
args = step.args()
if args:
self.build_list_params(step_params, args, 'HadoopJarStep.Args.member')
step_params['Name'] = step.name
return step_params
def _build_bootstrap_action_list(self, bootstrap_actions):
if not isinstance(bootstrap_actions, list):
bootstrap_actions = [bootstrap_actions]
params = {}
for i, bootstrap_action in enumerate(bootstrap_actions):
for key, value in six.iteritems(bootstrap_action):
params['BootstrapActions.member.%s.%s' % (i + 1, key)] = value
return params
def _build_step_list(self, steps):
if not isinstance(steps, list):
steps = [steps]
params = {}
for i, step in enumerate(steps):
for key, value in six.iteritems(step):
params['Steps.member.%s.%s' % (i+1, key)] = value
return params
def _build_string_list(self, field, items):
if not isinstance(items, list):
items = [items]
params = {}
for i, item in enumerate(items):
params['%s.member.%s' % (field, i + 1)] = item
return params
def _build_tag_list(self, tags):
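        """
        Takes a dict of tags and flattens it into sorted EMR request
        parameters; e.g. {'env': 'prod'} becomes
        {'Tags.member.1.Key': 'env', 'Tags.member.1.Value': 'prod'}.
        """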
assert isinstance(tags, dict)
params = {}
for i, key_value in enumerate(sorted(six.iteritems(tags)), start=1):
key, value = key_value
current_prefix = 'Tags.member.%s' % i
params['%s.Key' % current_prefix] = key
if value:
params['%s.Value' % current_prefix] = value
return params
def _build_instance_common_args(self, ec2_keyname, availability_zone,
keep_alive, hadoop_version):
"""
Takes a number of parameters used when starting a jobflow (as
        specified in run_jobflow() above). Returns the corresponding dict for
use in making a RunJobFlow request.
"""
params = {
'Instances.KeepJobFlowAliveWhenNoSteps': str(keep_alive).lower(),
}
if hadoop_version:
params['Instances.HadoopVersion'] = hadoop_version
if ec2_keyname:
params['Instances.Ec2KeyName'] = ec2_keyname
if availability_zone:
params['Instances.Placement.AvailabilityZone'] = availability_zone
return params
def _build_instance_count_and_type_args(self, master_instance_type,
slave_instance_type, num_instances):
"""
Takes a master instance type (string), a slave instance type
        (string), and a number of instances. Returns the corresponding dict
for use in making a RunJobFlow request.
"""
params = {'Instances.MasterInstanceType': master_instance_type,
'Instances.SlaveInstanceType': slave_instance_type,
'Instances.InstanceCount': num_instances}
return params
def _build_instance_group_args(self, instance_group):
"""
Takes an InstanceGroup; returns a dict that, when its keys are
properly prefixed, can be used for describing InstanceGroups in
RunJobFlow or AddInstanceGroups requests.
"""
params = {'InstanceCount': instance_group.num_instances,
'InstanceRole': instance_group.role,
'InstanceType': instance_group.type,
'Name': instance_group.name,
'Market': instance_group.market}
if instance_group.market == 'SPOT':
params['BidPrice'] = instance_group.bidprice
return params
def _build_instance_group_list_args(self, instance_groups):
"""
Takes a list of InstanceGroups, or a single InstanceGroup. Returns
        the corresponding dict for use in making a RunJobFlow or AddInstanceGroups
request.
"""
if not isinstance(instance_groups, list):
instance_groups = [instance_groups]
params = {}
for i, instance_group in enumerate(instance_groups):
ig_dict = self._build_instance_group_args(instance_group)
for key, value in six.iteritems(ig_dict):
params['InstanceGroups.member.%d.%s' % (i+1, key)] = value
return params
|
DolphinDream/sverchok | refs/heads/master | utils/ascii_print.py | 2 | import os
from sverchok.utils.development import get_version_string
# pylint: disable=c0304
# pylint: disable=c0326
# pylint: disable=w1401
def logo():
l1 = r" ______ _ _ _______ ______ _______ _ _ _____ _ _"
l2 = r"/______ \ / |______ |_____/ | |_____| | | |____/ "
l3 = r"______/ \/ |______ | \_ |_____ | | |_____| | \_"
l4 = r"initialized."
lines = [l1, l2, l3, l4]
can_paint = os.name in {'posix'}
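    # "\033[1;31m{0}\033[0m" wraps each line in bold red ANSI escapes.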
with_color = "\033[1;31m{0}\033[0m" if can_paint else "{0}"
for line in lines:
print(with_color.format(line))
def show_welcome():
logo()
print("\nsv: version:", get_version_string())
|
horance-liu/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/ops/bijectors/permute.py | 10 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Permute bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.contrib.distributions.python.ops.bijectors.permute_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = ["Permute"]
remove_undocumented(__name__, _allowed_symbols)
|
shhui/nova | refs/heads/master | nova/tests/api/openstack/compute/contrib/test_used_limits.py | 20 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.compute.contrib import used_limits
from nova.api.openstack.compute import limits
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
import nova.context
from nova import exception
from nova import quota
from nova import test
class FakeRequest(object):
def __init__(self, context, reserved=False):
self.environ = {'nova.context': context}
self.reserved = reserved
self.GET = {'reserved': 1} if reserved else {}
class UsedLimitsTestCase(test.NoDBTestCase):
def setUp(self):
"""Run before each test."""
super(UsedLimitsTestCase, self).setUp()
self.ext_mgr = self.mox.CreateMock(extensions.ExtensionManager)
self.controller = used_limits.UsedLimitsController(self.ext_mgr)
self.fake_context = nova.context.RequestContext('fake', 'fake')
self.mox.StubOutWithMock(used_limits, 'authorize_for_admin')
self.authorize_for_admin = used_limits.authorize_for_admin
def _do_test_used_limits(self, reserved):
fake_req = FakeRequest(self.fake_context, reserved=reserved)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj)
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
}
limits = {}
for display_name, q in quota_map.iteritems():
limits[q] = {'limit': len(display_name),
'in_use': len(display_name) / 2,
'reserved': len(display_name) / 3}
def stub_get_project_quotas(context, project_id, usages=True):
return limits
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
self.mox.ReplayAll()
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
for used_limit, value in abs_limits.iteritems():
r = limits[quota_map[used_limit]]['reserved'] if reserved else 0
self.assertEqual(value,
limits[quota_map[used_limit]]['in_use'] + r)
def test_used_limits_basic(self):
self._do_test_used_limits(False)
def test_used_limits_with_reserved(self):
self._do_test_used_limits(True)
def test_admin_can_fetch_limits_for_a_given_tenant_id(self):
project_id = "123456"
user_id = "A1234"
tenant_id = 'abcd'
self.fake_context.project_id = project_id
self.fake_context.user_id = user_id
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
target = {
"project_id": tenant_id,
"user_id": user_id
}
fake_req = FakeRequest(self.fake_context)
fake_req.GET = {'tenant_id': tenant_id}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
self.authorize_for_admin(self.fake_context, target=target)
self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
quota.QUOTAS.get_project_quotas(self.fake_context, '%s' % tenant_id,
usages=True).AndReturn({})
self.mox.ReplayAll()
res = wsgi.ResponseObject(obj)
self.controller.index(fake_req, res)
def test_admin_can_fetch_used_limits_for_own_project(self):
project_id = "123456"
user_id = "A1234"
self.fake_context.project_id = project_id
self.fake_context.user_id = user_id
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
fake_req = FakeRequest(self.fake_context)
fake_req.GET = {}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
self.mox.StubOutWithMock(extensions, 'extension_authorizer')
self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
quota.QUOTAS.get_project_quotas(self.fake_context, '%s' % project_id,
usages=True).AndReturn({})
self.mox.ReplayAll()
res = wsgi.ResponseObject(obj)
self.controller.index(fake_req, res)
def test_non_admin_cannot_fetch_used_limits_for_any_other_project(self):
project_id = "123456"
user_id = "A1234"
tenant_id = "abcd"
self.fake_context.project_id = project_id
self.fake_context.user_id = user_id
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
target = {
"project_id": tenant_id,
"user_id": user_id
}
fake_req = FakeRequest(self.fake_context)
fake_req.GET = {'tenant_id': tenant_id}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
self.authorize_for_admin(self.fake_context, target=target). \
AndRaise(exception.PolicyNotAuthorized(
action="compute_extension:used_limits_for_admin"))
self.mox.ReplayAll()
res = wsgi.ResponseObject(obj)
self.assertRaises(exception.PolicyNotAuthorized, self.controller.index,
fake_req, res)
def test_used_limits_fetched_for_context_project_id(self):
project_id = "123456"
self.fake_context.project_id = project_id
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
fake_req = FakeRequest(self.fake_context)
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
quota.QUOTAS.get_project_quotas(self.fake_context, project_id,
usages=True).AndReturn({})
self.mox.ReplayAll()
res = wsgi.ResponseObject(obj)
self.controller.index(fake_req, res)
def test_used_ram_added(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
},
},
}
res = wsgi.ResponseObject(obj)
def stub_get_project_quotas(context, project_id, usages=True):
return {'ram': {'limit': 512, 'in_use': 256}}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.mox.ReplayAll()
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
self.assertIn('totalRAMUsed', abs_limits)
self.assertEqual(abs_limits['totalRAMUsed'], 256)
def test_no_ram_quota(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj)
def stub_get_project_quotas(context, project_id, usages=True):
return {}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.mox.ReplayAll()
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
self.assertNotIn('totalRAMUsed', abs_limits)
def test_used_limits_xmlns(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj, xml=limits.LimitsTemplate)
res.preserialize('xml')
def stub_get_project_quotas(context, project_id, usages=True):
return {}
self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.mox.ReplayAll()
self.controller.index(fake_req, res)
response = res.serialize(None, 'xml')
self.assertIn(used_limits.XMLNS, response.body)
|
spvkgn/youtube-dl | refs/heads/master | youtube_dl/extractor/pluralsight.py | 15 | from __future__ import unicode_literals
import collections
import json
import os
import random
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
)
from ..utils import (
dict_get,
ExtractorError,
float_or_none,
int_or_none,
parse_duration,
qualities,
srt_subtitles_timecode,
try_get,
update_url_query,
urlencode_postdata,
)
class PluralsightBaseIE(InfoExtractor):
_API_BASE = 'https://app.pluralsight.com'
_GRAPHQL_EP = '%s/player/api/graphql' % _API_BASE
_GRAPHQL_HEADERS = {
'Content-Type': 'application/json;charset=UTF-8',
}
_GRAPHQL_COURSE_TMPL = '''
query BootstrapPlayer {
rpc {
bootstrapPlayer {
profile {
firstName
lastName
email
username
userHandle
authed
isAuthed
plan
}
course(courseId: "%s") {
name
title
courseHasCaptions
translationLanguages {
code
name
}
supportsWideScreenVideoFormats
timestamp
modules {
name
title
duration
formattedDuration
author
authorized
clips {
authorized
clipId
duration
formattedDuration
id
index
moduleIndex
moduleTitle
name
title
watched
}
}
}
}
}
}'''
def _download_course(self, course_id, url, display_id):
try:
return self._download_course_rpc(course_id, url, display_id)
except ExtractorError:
# Old API fallback
return self._download_json(
'https://app.pluralsight.com/player/user/api/v1/player/payload',
display_id, data=urlencode_postdata({'courseId': course_id}),
headers={'Referer': url})
def _download_course_rpc(self, course_id, url, display_id):
response = self._download_json(
self._GRAPHQL_EP, display_id, data=json.dumps({
'query': self._GRAPHQL_COURSE_TMPL % course_id,
'variables': {}
}).encode('utf-8'), headers=self._GRAPHQL_HEADERS)
course = try_get(
response, lambda x: x['data']['rpc']['bootstrapPlayer']['course'],
dict)
if course:
return course
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, response['error']['message']),
expected=True)
class PluralsightIE(PluralsightBaseIE):
IE_NAME = 'pluralsight'
_VALID_URL = r'https?://(?:(?:www|app)\.)?pluralsight\.com/(?:training/)?player\?'
_LOGIN_URL = 'https://app.pluralsight.com/id/'
_NETRC_MACHINE = 'pluralsight'
_TESTS = [{
'url': 'http://www.pluralsight.com/training/player?author=mike-mckeown&name=hosting-sql-server-windows-azure-iaas-m7-mgmt&mode=live&clip=3&course=hosting-sql-server-windows-azure-iaas',
'md5': '4d458cf5cf4c593788672419a8dd4cf8',
'info_dict': {
'id': 'hosting-sql-server-windows-azure-iaas-m7-mgmt-04',
'ext': 'mp4',
'title': 'Demo Monitoring',
'duration': 338,
},
'skip': 'Requires pluralsight account credentials',
}, {
'url': 'https://app.pluralsight.com/training/player?course=angularjs-get-started&author=scott-allen&name=angularjs-get-started-m1-introduction&clip=0&mode=live',
'only_matching': True,
}, {
# available without pluralsight account
'url': 'http://app.pluralsight.com/training/player?author=scott-allen&name=angularjs-get-started-m1-introduction&mode=live&clip=0&course=angularjs-get-started',
'only_matching': True,
}, {
'url': 'https://app.pluralsight.com/player?course=ccna-intro-networking&author=ross-bagurdes&name=ccna-intro-networking-m06&clip=0',
'only_matching': True,
}]
GRAPHQL_VIEWCLIP_TMPL = '''
query viewClip {
viewClip(input: {
author: "%(author)s",
clipIndex: %(clipIndex)d,
courseName: "%(courseName)s",
includeCaptions: %(includeCaptions)s,
locale: "%(locale)s",
mediaType: "%(mediaType)s",
moduleName: "%(moduleName)s",
quality: "%(quality)s"
}) {
urls {
url
cdn
rank
source
},
status
}
}'''
def _real_initialize(self):
self._login()
def _login(self):
username, password = self._get_login_info()
if username is None:
return
login_page = self._download_webpage(
self._LOGIN_URL, None, 'Downloading login page')
login_form = self._hidden_inputs(login_page)
login_form.update({
'Username': username,
'Password': password,
})
post_url = self._search_regex(
r'<form[^>]+action=(["\'])(?P<url>.+?)\1', login_page,
'post url', default=self._LOGIN_URL, group='url')
if not post_url.startswith('http'):
post_url = compat_urlparse.urljoin(self._LOGIN_URL, post_url)
response = self._download_webpage(
post_url, None, 'Logging in',
data=urlencode_postdata(login_form),
headers={'Content-Type': 'application/x-www-form-urlencoded'})
error = self._search_regex(
r'<span[^>]+class="field-validation-error"[^>]*>([^<]+)</span>',
response, 'error message', default=None)
if error:
raise ExtractorError('Unable to login: %s' % error, expected=True)
if all(not re.search(p, response) for p in (
r'__INITIAL_STATE__', r'["\']currentUser["\']',
# new layout?
r'>\s*Sign out\s*<')):
BLOCKED = 'Your account has been blocked due to suspicious activity'
if BLOCKED in response:
raise ExtractorError(
'Unable to login: %s' % BLOCKED, expected=True)
MUST_AGREE = 'To continue using Pluralsight, you must agree to'
if any(p in response for p in (MUST_AGREE, '>Disagree<', '>Agree<')):
raise ExtractorError(
'Unable to login: %s some documents. Go to pluralsight.com, '
'log in and agree with what Pluralsight requires.'
% MUST_AGREE, expected=True)
raise ExtractorError('Unable to log in')
def _get_subtitles(self, author, clip_idx, clip_id, lang, name, duration, video_id):
captions = None
if clip_id:
captions = self._download_json(
'%s/transcript/api/v1/caption/json/%s/%s'
% (self._API_BASE, clip_id, lang), video_id,
'Downloading captions JSON', 'Unable to download captions JSON',
fatal=False)
if not captions:
captions_post = {
'a': author,
'cn': int(clip_idx),
'lc': lang,
'm': name,
}
captions = self._download_json(
'%s/player/retrieve-captions' % self._API_BASE, video_id,
'Downloading captions JSON', 'Unable to download captions JSON',
fatal=False, data=json.dumps(captions_post).encode('utf-8'),
headers={'Content-Type': 'application/json;charset=utf-8'})
if captions:
return {
lang: [{
'ext': 'json',
'data': json.dumps(captions),
}, {
'ext': 'srt',
'data': self._convert_subtitles(duration, captions),
}]
}
@staticmethod
def _convert_subtitles(duration, subs):
srt = ''
TIME_OFFSET_KEYS = ('displayTimeOffset', 'DisplayTimeOffset')
TEXT_KEYS = ('text', 'Text')
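        # Each entry becomes one SRT cue, roughly (illustrative):
        #   0
        #   00:00:01,000 --> 00:00:04,000
        #   Some caption text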
        for num, current in enumerate(subs):
start, text = (
float_or_none(dict_get(current, TIME_OFFSET_KEYS, skip_false_values=False)),
dict_get(current, TEXT_KEYS))
if start is None or text is None:
continue
end = duration if num == len(subs) - 1 else float_or_none(
dict_get(subs[num + 1], TIME_OFFSET_KEYS, skip_false_values=False))
if end is None:
continue
srt += os.linesep.join(
(
'%d' % num,
'%s --> %s' % (
srt_subtitles_timecode(start),
srt_subtitles_timecode(end)),
text,
os.linesep,
))
return srt
def _real_extract(self, url):
qs = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
author = qs.get('author', [None])[0]
name = qs.get('name', [None])[0]
clip_idx = qs.get('clip', [None])[0]
course_name = qs.get('course', [None])[0]
if any(not f for f in (author, name, clip_idx, course_name,)):
raise ExtractorError('Invalid URL', expected=True)
display_id = '%s-%s' % (name, clip_idx)
course = self._download_course(course_name, url, display_id)
collection = course['modules']
clip = None
for module_ in collection:
if name in (module_.get('moduleName'), module_.get('name')):
for clip_ in module_.get('clips', []):
clip_index = clip_.get('clipIndex')
if clip_index is None:
clip_index = clip_.get('index')
if clip_index is None:
continue
if compat_str(clip_index) == clip_idx:
clip = clip_
break
if not clip:
raise ExtractorError('Unable to resolve clip')
title = clip['title']
clip_id = clip.get('clipName') or clip.get('name') or clip['clipId']
QUALITIES = {
'low': {'width': 640, 'height': 480},
'medium': {'width': 848, 'height': 640},
'high': {'width': 1024, 'height': 768},
'high-widescreen': {'width': 1280, 'height': 720},
}
QUALITIES_PREFERENCE = ('low', 'medium', 'high', 'high-widescreen',)
quality_key = qualities(QUALITIES_PREFERENCE)
AllowedQuality = collections.namedtuple('AllowedQuality', ['ext', 'qualities'])
ALLOWED_QUALITIES = (
AllowedQuality('webm', ['high', ]),
AllowedQuality('mp4', ['low', 'medium', 'high', ]),
)
# Some courses also offer widescreen resolution for high quality (see
# https://github.com/ytdl-org/youtube-dl/issues/7766)
widescreen = course.get('supportsWideScreenVideoFormats') is True
best_quality = 'high-widescreen' if widescreen else 'high'
if widescreen:
for allowed_quality in ALLOWED_QUALITIES:
allowed_quality.qualities.append(best_quality)
        # In order to minimize the number of calls to the ViewClip API and reduce
        # the probability of being throttled or banned by Pluralsight, we request
        # only a single format unless a formats listing is explicitly requested.
if self._downloader.params.get('listformats', False):
allowed_qualities = ALLOWED_QUALITIES
else:
def guess_allowed_qualities():
req_format = self._downloader.params.get('format') or 'best'
req_format_split = req_format.split('-', 1)
if len(req_format_split) > 1:
req_ext, req_quality = req_format_split
req_quality = '-'.join(req_quality.split('-')[:2])
for allowed_quality in ALLOWED_QUALITIES:
if req_ext == allowed_quality.ext and req_quality in allowed_quality.qualities:
return (AllowedQuality(req_ext, (req_quality, )), )
req_ext = 'webm' if self._downloader.params.get('prefer_free_formats') else 'mp4'
return (AllowedQuality(req_ext, (best_quality, )), )
allowed_qualities = guess_allowed_qualities()
formats = []
for ext, qualities_ in allowed_qualities:
for quality in qualities_:
f = QUALITIES[quality].copy()
clip_post = {
'author': author,
'includeCaptions': 'false',
'clipIndex': int(clip_idx),
'courseName': course_name,
'locale': 'en',
'moduleName': name,
'mediaType': ext,
'quality': '%dx%d' % (f['width'], f['height']),
}
format_id = '%s-%s' % (ext, quality)
try:
viewclip = self._download_json(
self._GRAPHQL_EP, display_id,
'Downloading %s viewclip graphql' % format_id,
data=json.dumps({
'query': self.GRAPHQL_VIEWCLIP_TMPL % clip_post,
'variables': {}
}).encode('utf-8'),
headers=self._GRAPHQL_HEADERS)['data']['viewClip']
except ExtractorError:
                    # The old endpoint still works but will most likely go away soon
viewclip = self._download_json(
'%s/video/clips/viewclip' % self._API_BASE, display_id,
'Downloading %s viewclip JSON' % format_id, fatal=False,
data=json.dumps(clip_post).encode('utf-8'),
headers={'Content-Type': 'application/json;charset=utf-8'})
                # Pluralsight tracks multiple sequential calls to the ViewClip API
                # and starts to return HTTP 429 errors after some time (see
                # https://github.com/ytdl-org/youtube-dl/pull/6989). Moreover, it may
                # even lead to an account ban (see
                # https://github.com/ytdl-org/youtube-dl/issues/6842).
                # To somewhat reduce the probability of these consequences we sleep
                # for a random amount of time before each call to ViewClip.
self._sleep(
random.randint(2, 5), display_id,
'%(video_id)s: Waiting for %(timeout)s seconds to avoid throttling')
if not viewclip:
continue
clip_urls = viewclip.get('urls')
if not isinstance(clip_urls, list):
continue
for clip_url_data in clip_urls:
clip_url = clip_url_data.get('url')
if not clip_url:
continue
cdn = clip_url_data.get('cdn')
clip_f = f.copy()
clip_f.update({
'url': clip_url,
'ext': ext,
'format_id': '%s-%s' % (format_id, cdn) if cdn else format_id,
'quality': quality_key(quality),
'source_preference': int_or_none(clip_url_data.get('rank')),
})
formats.append(clip_f)
self._sort_formats(formats)
duration = int_or_none(
clip.get('duration')) or parse_duration(clip.get('formattedDuration'))
# TODO: other languages?
subtitles = self.extract_subtitles(
author, clip_idx, clip.get('clipId'), 'en', name, duration, display_id)
return {
'id': clip_id,
'title': title,
'duration': duration,
'creator': author,
'formats': formats,
'subtitles': subtitles,
}
class PluralsightCourseIE(PluralsightBaseIE):
IE_NAME = 'pluralsight:course'
_VALID_URL = r'https?://(?:(?:www|app)\.)?pluralsight\.com/(?:library/)?courses/(?P<id>[^/]+)'
_TESTS = [{
# Free course from Pluralsight Starter Subscription for Microsoft TechNet
# https://offers.pluralsight.com/technet?loc=zTS3z&prod=zOTprodz&tech=zOttechz&prog=zOTprogz&type=zSOz&media=zOTmediaz&country=zUSz
'url': 'http://www.pluralsight.com/courses/hosting-sql-server-windows-azure-iaas',
'info_dict': {
'id': 'hosting-sql-server-windows-azure-iaas',
'title': 'Hosting SQL Server in Microsoft Azure IaaS Fundamentals',
'description': 'md5:61b37e60f21c4b2f91dc621a977d0986',
},
'playlist_count': 31,
}, {
# available without pluralsight account
'url': 'https://www.pluralsight.com/courses/angularjs-get-started',
'only_matching': True,
}, {
'url': 'https://app.pluralsight.com/library/courses/understanding-microsoft-azure-amazon-aws/table-of-contents',
'only_matching': True,
}]
def _real_extract(self, url):
course_id = self._match_id(url)
# TODO: PSM cookie
course = self._download_course(course_id, url, course_id)
title = course['title']
course_name = course['name']
course_data = course['modules']
description = course.get('description') or course.get('shortDescription')
entries = []
for num, module in enumerate(course_data, 1):
author = module.get('author')
module_name = module.get('name')
if not author or not module_name:
continue
for clip in module.get('clips', []):
clip_index = int_or_none(clip.get('index'))
if clip_index is None:
continue
clip_url = update_url_query(
'%s/player' % self._API_BASE, query={
'mode': 'live',
'course': course_name,
'author': author,
'name': module_name,
'clip': clip_index,
})
entries.append({
'_type': 'url_transparent',
'url': clip_url,
'ie_key': PluralsightIE.ie_key(),
'chapter': module.get('title'),
'chapter_number': num,
'chapter_id': module.get('moduleRef'),
})
return self.playlist_result(entries, course_id, title, description)
|
SEL-Columbia/commcare-hq | refs/heads/master | custom/care_pathways/reports/table_card_report.py | 1 | from corehq.apps.reports.graph_models import MultiBarChart, Axis
from custom.care_pathways.reports import CareBaseReport
from custom.care_pathways.filters import GeographyFilter, GenderFilter, GroupLeadershipFilter, CBTNameFilter, PPTYearFilter, ScheduleFilter, TableCardGroupByFilter, TableCardTypeFilter
from dimagi.utils.decorators.memoized import memoized
from custom.care_pathways.sqldata import TableCardReportIndividualPercentSqlData, TableCardReportGrouppedPercentSqlData, TableCardSqlData
class TableCardReport(CareBaseReport):
name = 'Table Report Card'
slug = 'table_card_report'
report_title = 'Table Report Card'
report_template_path = "care_pathways/multi_report.html"
@property
@memoized
def data_providers(self):
config = self.report_config
config.update(dict(
group='practice',
            table_card_group_by=self.request.GET.get('group_by', ''),
))
return [
TableCardSqlData(self.domain, config, self.request_params),
TableCardReportGrouppedPercentSqlData(self.domain, config, self.request_params),
TableCardReportIndividualPercentSqlData(self.domain, config, self.request_params)
]
@property
def report_context(self):
rows = []
if not self.needs_filters:
rows = self.data_providers[0].data
context = {
'reports': [self.get_report_context(dp, rows) for dp in self.data_providers[1:]],
'title': self.report_title
}
return context
def get_report_context(self, data_provider, rows):
total_row = []
headers = []
charts = []
if rows:
headers = data_provider.headers(rows)
rows = list(data_provider.format_rows(rows))
if data_provider.show_total:
total_row = data_provider.calculate_total_row(headers, rows)
if data_provider.show_charts:
charts = list(self.get_chart(
rows,
headers,
x_label=data_provider.chart_x_label,
y_label=data_provider.chart_y_label
))
context = dict(
report_table=dict(
title=data_provider.title,
slug=data_provider.slug,
headers=headers,
rows=rows,
total_row=total_row,
default_rows=self.default_rows,
datatables=data_provider.datatables,
start_at_row=0,
fix_column=data_provider.fix_left_col
),
charts=charts,
chart_span=12
)
return context
@property
def fields(self):
filters = [GeographyFilter,
TableCardGroupByFilter,
PPTYearFilter,
TableCardTypeFilter,
GenderFilter,
GroupLeadershipFilter,
CBTNameFilter,
]
if self.domain == 'pathways-india-mis':
filters.append(ScheduleFilter)
return filters
def get_chart(self, rows, columns, x_label, y_label):
chart = MultiBarChart('% of Groups Receiving Grades', x_axis=Axis(x_label), y_axis=Axis(y_label))
chart.forceY = [0, 100]
chart.height = 700
chart.rotateLabels = -90
chart.marginBottom = 390
chart.marginLeft = 100
self._chart_data(chart, columns, rows)
return [chart]
def _chart_data(self, chart, columns, rows):
def p2f(column):
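            # e.g. '42%' -> 42.0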
return float(column.strip('%'))
if rows:
charts = [[], [], [], []]
flat_columns = []
for group_column in columns.header[1:]:
for c in group_column.columns:
flat_columns.append(c.html)
for idx, row in enumerate(rows):
for ix, column in enumerate(row[1:]):
group_name = flat_columns[ix]
charts[idx].append({'x': group_name, 'y': p2f(column)})
chart.add_dataset('A', charts[0], "green")
chart.add_dataset('B', charts[1], "orange")
chart.add_dataset('C', charts[2], "yellow")
chart.add_dataset('D', charts[3], "red")
|
dushu1203/chromium.src | refs/heads/nw12 | chrome/android/host_driven_tests/DummyTest.py | 113 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Host-driven java_tests which exercise dummy functionality.
This test class is only here to ensure that the test framework for host-driven
tests works.
"""
from pylib.host_driven import test_case
from pylib.host_driven import tests_annotations
class DummyTest(test_case.HostDrivenTestCase):
"""Dummy host-driven test for testing the framework itself."""
@tests_annotations.Smoke
def testPass(self):
return self._RunJavaTestFilters(['DummyTest.testPass'])
|
knifenomad/django | refs/heads/master | tests/shortcuts/tests.py | 132 | from django.test import SimpleTestCase, override_settings
from django.test.utils import require_jinja2
@override_settings(
ROOT_URLCONF='shortcuts.urls',
)
class ShortcutTests(SimpleTestCase):
def test_render_to_response(self):
response = self.client.get('/render_to_response/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
def test_render_to_response_with_multiple_templates(self):
response = self.client.get('/render_to_response/multiple_templates/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
def test_render_to_response_with_content_type(self):
response = self.client.get('/render_to_response/content_type/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR..\n')
self.assertEqual(response['Content-Type'], 'application/x-rendertest')
def test_render_to_response_with_status(self):
response = self.client.get('/render_to_response/status/')
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b'FOO.BAR..\n')
@require_jinja2
def test_render_to_response_with_using(self):
response = self.client.get('/render_to_response/using/')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render_to_response/using/?using=django')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render_to_response/using/?using=jinja2')
self.assertEqual(response.content, b'Jinja2\n')
def test_render(self):
response = self.client.get('/render/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/\n')
self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
self.assertFalse(hasattr(response.context.request, 'current_app'))
def test_render_with_multiple_templates(self):
response = self.client.get('/render/multiple_templates/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/multiple_templates/\n')
def test_render_with_content_type(self):
response = self.client.get('/render/content_type/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'FOO.BAR../render/content_type/\n')
self.assertEqual(response['Content-Type'], 'application/x-rendertest')
def test_render_with_status(self):
response = self.client.get('/render/status/')
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b'FOO.BAR../render/status/\n')
@require_jinja2
def test_render_with_using(self):
response = self.client.get('/render/using/')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render/using/?using=django')
self.assertEqual(response.content, b'DTL\n')
response = self.client.get('/render/using/?using=jinja2')
self.assertEqual(response.content, b'Jinja2\n')
|
higgintop/hca_code_project | refs/heads/master | node_modules/grunt-sass/node_modules/node-sass/node_modules/pangyp/gyp/tools/graphviz.py | 2679 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Using the JSON dumped by the dump-dependency-json generator,
generate input suitable for graphviz to render a dependency graph of
targets."""
import collections
import json
import sys
def ParseTarget(target):
target, _, suffix = target.partition('#')
filename, _, target = target.partition(':')
return filename, target, suffix
def LoadEdges(filename, targets):
"""Load the edges map from the dump file, and filter it to only
  show targets in |targets| and their dependents."""
  f = open(filename)
  edges = json.load(f)
  f.close()
# Copy out only the edges we're interested in from the full edge list.
target_edges = {}
to_visit = targets[:]
while to_visit:
src = to_visit.pop()
if src in target_edges:
continue
target_edges[src] = edges[src]
to_visit.extend(edges[src])
return target_edges
def WriteGraph(edges):
"""Print a graphviz graph to stdout.
|edges| is a map of target to a list of other targets it depends on."""
# Bucket targets by file.
files = collections.defaultdict(list)
for src, dst in edges.items():
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)
print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
# to those nodes.
for filename, targets in files.items():
if len(targets) == 1:
# If there's only one node for this file, simplify
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print '}'
def main():
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
return 1
edges = LoadEdges('dump.json', sys.argv[1:])
WriteGraph(edges)
return 0
if __name__ == '__main__':
sys.exit(main())
|
turbokongen/home-assistant | refs/heads/dev | tests/components/withings/common.py | 3 | """Common data for for the withings component tests."""
from dataclasses import dataclass
from typing import List, Optional, Tuple, Union
from unittest.mock import MagicMock
from urllib.parse import urlparse
from aiohttp.test_utils import TestClient
import arrow
import pytz
from withings_api.common import (
MeasureGetMeasResponse,
NotifyAppli,
NotifyListResponse,
SleepGetSummaryResponse,
UserGetDeviceResponse,
)
from homeassistant import data_entry_flow
import homeassistant.components.api as api
from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN
import homeassistant.components.webhook as webhook
from homeassistant.components.withings import async_unload_entry
from homeassistant.components.withings.common import (
ConfigEntryWithingsApi,
DataManager,
get_all_data_managers,
)
import homeassistant.components.withings.const as const
from homeassistant.config import async_process_ha_core_config
from homeassistant.config_entries import SOURCE_USER, ConfigEntry
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_EXTERNAL_URL,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_METRIC,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.config_entry_oauth2_flow import AUTH_CALLBACK_PATH
from homeassistant.setup import async_setup_component
from tests.test_util.aiohttp import AiohttpClientMocker
@dataclass
class ProfileConfig:
"""Data representing a user profile."""
profile: str
user_id: int
api_response_user_get_device: Union[UserGetDeviceResponse, Exception]
api_response_measure_get_meas: Union[MeasureGetMeasResponse, Exception]
api_response_sleep_get_summary: Union[SleepGetSummaryResponse, Exception]
api_response_notify_list: Union[NotifyListResponse, Exception]
api_response_notify_revoke: Optional[Exception]
def new_profile_config(
profile: str,
user_id: int,
api_response_user_get_device: Optional[
Union[UserGetDeviceResponse, Exception]
] = None,
api_response_measure_get_meas: Optional[
Union[MeasureGetMeasResponse, Exception]
] = None,
api_response_sleep_get_summary: Optional[
Union[SleepGetSummaryResponse, Exception]
] = None,
api_response_notify_list: Optional[Union[NotifyListResponse, Exception]] = None,
api_response_notify_revoke: Optional[Exception] = None,
) -> ProfileConfig:
"""Create a new profile config immutable object."""
return ProfileConfig(
profile=profile,
user_id=user_id,
api_response_user_get_device=api_response_user_get_device
or UserGetDeviceResponse(devices=[]),
api_response_measure_get_meas=api_response_measure_get_meas
or MeasureGetMeasResponse(
measuregrps=[],
more=False,
offset=0,
timezone=pytz.UTC,
updatetime=arrow.get(12345),
),
api_response_sleep_get_summary=api_response_sleep_get_summary
or SleepGetSummaryResponse(more=False, offset=0, series=[]),
api_response_notify_list=api_response_notify_list
or NotifyListResponse(profiles=[]),
api_response_notify_revoke=api_response_notify_revoke,
)
@dataclass
class WebhookResponse:
"""Response data from a webhook."""
message: str
message_code: int
class ComponentFactory:
"""Manages the setup and unloading of the withing component and profiles."""
def __init__(
self,
hass: HomeAssistant,
api_class_mock: MagicMock,
aiohttp_client,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Initialize the object."""
self._hass = hass
self._api_class_mock = api_class_mock
self._aiohttp_client = aiohttp_client
self._aioclient_mock = aioclient_mock
self._client_id = None
self._client_secret = None
self._profile_configs: Tuple[ProfileConfig, ...] = ()
async def configure_component(
self,
client_id: str = "my_client_id",
client_secret: str = "my_client_secret",
profile_configs: Tuple[ProfileConfig, ...] = (),
) -> None:
"""Configure the wihings component."""
self._client_id = client_id
self._client_secret = client_secret
self._profile_configs = profile_configs
hass_config = {
"homeassistant": {
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC,
CONF_EXTERNAL_URL: "http://127.0.0.1:8080/",
},
api.DOMAIN: {},
const.DOMAIN: {
CONF_CLIENT_ID: self._client_id,
CONF_CLIENT_SECRET: self._client_secret,
const.CONF_USE_WEBHOOK: True,
},
}
await async_process_ha_core_config(self._hass, hass_config.get("homeassistant"))
assert await async_setup_component(self._hass, HA_DOMAIN, {})
assert await async_setup_component(self._hass, webhook.DOMAIN, hass_config)
assert await async_setup_component(self._hass, const.DOMAIN, hass_config)
await self._hass.async_block_till_done()
@staticmethod
def _setup_api_method(api_method, value) -> None:
if isinstance(value, Exception):
api_method.side_effect = value
else:
api_method.return_value = value
async def setup_profile(self, user_id: int) -> ConfigEntryWithingsApi:
"""Set up a user profile through config flows."""
profile_config = next(
iter(
[
profile_config
for profile_config in self._profile_configs
if profile_config.user_id == user_id
]
)
)
api_mock: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi)
ComponentFactory._setup_api_method(
api_mock.user_get_device, profile_config.api_response_user_get_device
)
ComponentFactory._setup_api_method(
api_mock.sleep_get_summary, profile_config.api_response_sleep_get_summary
)
ComponentFactory._setup_api_method(
api_mock.measure_get_meas, profile_config.api_response_measure_get_meas
)
ComponentFactory._setup_api_method(
api_mock.notify_list, profile_config.api_response_notify_list
)
ComponentFactory._setup_api_method(
api_mock.notify_revoke, profile_config.api_response_notify_revoke
)
self._api_class_mock.reset_mocks()
self._api_class_mock.return_value = api_mock
# Get the withings config flow.
result = await self._hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": SOURCE_USER}
)
assert result
# pylint: disable=protected-access
state = config_entry_oauth2_flow._encode_jwt(
self._hass,
{
"flow_id": result["flow_id"],
"redirect_uri": "http://127.0.0.1:8080/auth/external/callback",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
"https://account.withings.com/oauth2_user/authorize2?"
f"response_type=code&client_id={self._client_id}&"
"redirect_uri=http://127.0.0.1:8080/auth/external/callback&"
f"state={state}"
"&scope=user.info,user.metrics,user.activity,user.sleepevents"
)
# Simulate user being redirected from withings site.
client: TestClient = await self._aiohttp_client(self._hass.http.app)
resp = await client.get(f"{AUTH_CALLBACK_PATH}?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
self._aioclient_mock.clear_requests()
self._aioclient_mock.post(
"https://account.withings.com/oauth2/token",
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
"userid": profile_config.user_id,
},
)
# Present user with a list of profiles to choose from.
result = await self._hass.config_entries.flow.async_configure(result["flow_id"])
assert result.get("type") == "form"
assert result.get("step_id") == "profile"
assert "profile" in result.get("data_schema").schema
# Provide the user profile.
result = await self._hass.config_entries.flow.async_configure(
result["flow_id"], {const.PROFILE: profile_config.profile}
)
# Finish the config flow by calling it again.
assert result.get("type") == "create_entry"
assert result.get("result")
config_data = result.get("result").data
assert config_data.get(const.PROFILE) == profile_config.profile
assert config_data.get("auth_implementation") == const.DOMAIN
assert config_data.get("token")
# Wait for remaining tasks to complete.
await self._hass.async_block_till_done()
# Mock the webhook.
data_manager = get_data_manager_by_user_id(self._hass, user_id)
self._aioclient_mock.clear_requests()
self._aioclient_mock.request(
"HEAD",
data_manager.webhook_config.url,
)
return self._api_class_mock.return_value
async def call_webhook(self, user_id: int, appli: NotifyAppli) -> WebhookResponse:
"""Call the webhook to notify of data changes."""
client: TestClient = await self._aiohttp_client(self._hass.http.app)
data_manager = get_data_manager_by_user_id(self._hass, user_id)
resp = await client.post(
urlparse(data_manager.webhook_config.url).path,
data={"userid": user_id, "appli": appli.value},
)
# Wait for remaining tasks to complete.
await self._hass.async_block_till_done()
data = await resp.json()
resp.close()
return WebhookResponse(message=data["message"], message_code=data["code"])
async def unload(self, profile: ProfileConfig) -> None:
"""Unload the component for a specific user."""
config_entries = get_config_entries_for_user_id(self._hass, profile.user_id)
for config_entry in config_entries:
await async_unload_entry(self._hass, config_entry)
await self._hass.async_block_till_done()
assert not get_data_manager_by_user_id(self._hass, profile.user_id)
def get_config_entries_for_user_id(
hass: HomeAssistant, user_id: int
) -> Tuple[ConfigEntry]:
"""Get a list of config entries that apply to a specific withings user."""
return tuple(
[
config_entry
for config_entry in hass.config_entries.async_entries(const.DOMAIN)
if config_entry.data.get("token", {}).get("userid") == user_id
]
)
def async_get_flow_for_user_id(hass: HomeAssistant, user_id: int) -> List[dict]:
"""Get a flow for a user id."""
return [
flow
for flow in hass.config_entries.flow.async_progress()
if flow["handler"] == const.DOMAIN and flow["context"].get("userid") == user_id
]
def get_data_manager_by_user_id(
hass: HomeAssistant, user_id: int
) -> Optional[DataManager]:
"""Get a data manager by the user id."""
return next(
iter(
[
data_manager
for data_manager in get_all_data_managers(hass)
if data_manager.user_id == user_id
]
),
None,
)
|
SteveHNH/ansible | refs/heads/devel | lib/ansible/plugins/shell/csh.py | 69 | # (c) 2014, Chris Church <[email protected]>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.shell import ShellBase
class ShellModule(ShellBase):
# Common shell filenames that this plugin handles
COMPATIBLE_SHELLS = frozenset(('csh', 'tcsh'))
# Family of shells this has. Must match the filename without extension
SHELL_FAMILY = 'csh'
# How to end lines in a python script one-liner
_SHELL_EMBEDDED_PY_EOL = '\\\n'
_SHELL_REDIRECT_ALLNULL = '>& /dev/null'
_SHELL_AND = '&&'
_SHELL_OR = '||'
_SHELL_SUB_LEFT = '"`'
_SHELL_SUB_RIGHT = '`"'
_SHELL_GROUP_LEFT = '('
_SHELL_GROUP_RIGHT = ')'
def env_prefix(self, **kwargs):
return 'env %s' % super(ShellModule, self).env_prefix(**kwargs)
|
Antiun/yelizariev-addons | refs/heads/8.0 | web_debranding_support/__init__.py | 2148 | import models
|
alphageek-xyz/site | refs/heads/master | landing/models.py | 1 | import re
from django.db import models
from django.utils.functional import cached_property
from landing.utils import markup_markdown
class ServiceManager(models.Manager):
def last_modified(self):
return self.latest('modified').modified
class Service(models.Model):
objects = ServiceManager()
class Meta:
ordering = ('order',)
get_latest_by = 'modified'
name = models.CharField(
verbose_name='Service Name',
max_length=100,
unique=True,
)
description = models.TextField(
verbose_name='Description',
blank=True
)
order = models.IntegerField(
null=True,
)
modified = models.DateTimeField(
auto_now=True,
)
@cached_property
def html(self):
return markup_markdown(
self.description
)
@cached_property
def anchor_id(self):
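        # Collapse runs of '&', '/', '\', '@' and spaces into single
        # underscores (e.g. "Oil & Gas" -> "Oil_Gas"), then truncate
        # to 30 characters.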
return re.sub(
" ?[&/\\@ ]+ ?", '_', self.name
)[:30]
def get_absolute_url(self):
from django.urls import reverse
return '%s#%s' % (reverse('services'), self.anchor_id)
def save(self, *args, **kwargs):
if not self.order:
self.order = 1 + (
Service.objects.aggregate(
n=models.Max('order')
)['n'] or 0
)
return super(Service, self).save(*args, **kwargs)
def __str__(self):
return str(self.name)
|
wuxianghou/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/test/printer.py | 117 | # Copyright (C) 2012 Google, Inc.
# Copyright (C) 2010 Chris Jerdonek ([email protected])
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import StringIO
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.layout_tests.views.metered_stream import MeteredStream
_log = logging.getLogger(__name__)
class Printer(object):
def __init__(self, stream, options=None):
self.stream = stream
self.meter = None
self.options = options
self.num_tests = 0
self.num_started = 0
self.num_errors = 0
self.num_failures = 0
self.running_tests = []
self.completed_tests = []
if options:
self.configure(options)
def configure(self, options):
self.options = options
if options.timing:
# --timing implies --verbose
options.verbose = max(options.verbose, 1)
log_level = logging.INFO
if options.quiet:
log_level = logging.WARNING
elif options.verbose == 2:
log_level = logging.DEBUG
self.meter = MeteredStream(self.stream, (options.verbose == 2),
number_of_columns=SystemHost().platform.terminal_width())
handler = logging.StreamHandler(self.stream)
# We constrain the level on the handler rather than on the root
# logger itself. This is probably better because the handler is
# configured and known only to this module, whereas the root logger
# is an object shared (and potentially modified) by many modules.
# Modifying the handler, then, is less intrusive and less likely to
# interfere with modifications made by other modules (e.g. in unit
# tests).
handler.name = __name__
handler.setLevel(log_level)
formatter = logging.Formatter("%(message)s")
handler.setFormatter(formatter)
logger = logging.getLogger()
logger.addHandler(handler)
logger.setLevel(logging.NOTSET)
# Filter out most webkitpy messages.
#
# Messages can be selectively re-enabled for this script by updating
# this method accordingly.
def filter_records(record):
"""Filter out autoinstall and non-third-party webkitpy messages."""
# FIXME: Figure out a way not to use strings here, for example by
# using syntax like webkitpy.test.__name__. We want to be
# sure not to import any non-Python 2.4 code, though, until
# after the version-checking code has executed.
if (record.name.startswith("webkitpy.common.system.autoinstall") or
record.name.startswith("webkitpy.test")):
return True
if record.name.startswith("webkitpy"):
return False
return True
testing_filter = logging.Filter()
testing_filter.filter = filter_records
# Display a message so developers are not mystified as to why
# logging does not work in the unit tests.
_log.info("Suppressing most webkitpy logging while running unit tests.")
handler.addFilter(testing_filter)
if self.options.pass_through:
# FIXME: Can't import at top of file, as outputcapture needs unittest2
from webkitpy.common.system import outputcapture
outputcapture.OutputCapture.stream_wrapper = _CaptureAndPassThroughStream
def write_update(self, msg):
self.meter.write_update(msg)
def print_started_test(self, source, test_name):
self.running_tests.append(test_name)
if len(self.running_tests) > 1:
suffix = ' (+%d)' % (len(self.running_tests) - 1)
else:
suffix = ''
if self.options.verbose:
write = self.meter.write_update
else:
write = self.meter.write_throttled_update
write(self._test_line(self.running_tests[0], suffix))
def print_finished_test(self, source, test_name, test_time, failures, errors):
write = self.meter.writeln
if failures:
lines = failures[0].splitlines() + ['']
suffix = ' failed:'
self.num_failures += 1
elif errors:
lines = errors[0].splitlines() + ['']
suffix = ' erred:'
self.num_errors += 1
else:
suffix = ' passed'
lines = []
if self.options.verbose:
write = self.meter.writeln
else:
write = self.meter.write_throttled_update
if self.options.timing:
suffix += ' %.4fs' % test_time
self.num_started += 1
if test_name == self.running_tests[0]:
self.completed_tests.insert(0, [test_name, suffix, lines])
else:
self.completed_tests.append([test_name, suffix, lines])
self.running_tests.remove(test_name)
for test_name, msg, lines in self.completed_tests:
if lines:
self.meter.writeln(self._test_line(test_name, msg))
for line in lines:
self.meter.writeln(' ' + line)
else:
write(self._test_line(test_name, msg))
self.completed_tests = []
def _test_line(self, test_name, suffix):
format_string = '[%d/%d] %s%s'
status_line = format_string % (self.num_started, self.num_tests, test_name, suffix)
if len(status_line) > self.meter.number_of_columns():
overflow_columns = len(status_line) - self.meter.number_of_columns()
ellipsis = '...'
if len(test_name) < overflow_columns + len(ellipsis) + 3:
# We don't have enough space even if we elide, just show the test method name.
test_name = test_name.split('.')[-1]
else:
new_length = len(test_name) - overflow_columns - len(ellipsis)
prefix = int(new_length / 2)
test_name = test_name[:prefix] + ellipsis + test_name[-(new_length - prefix):]
return format_string % (self.num_started, self.num_tests, test_name, suffix)
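    # Illustrative sketch of the elision above (assumed values, not from the
    # original module): on a narrow meter, a long name such as
    # 'webkitpy.foo.bar_unittest.BarTest.test_baz' is shortened from the
    # middle, e.g. '[3/10] webkitpy.fo...test_baz passed'.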
def print_result(self, run_time):
write = self.meter.writeln
write('Ran %d test%s in %.3fs' % (self.num_started, self.num_started != 1 and "s" or "", run_time))
if self.num_failures or self.num_errors:
write('FAILED (failures=%d, errors=%d)\n' % (self.num_failures, self.num_errors))
else:
write('\nOK\n')
class _CaptureAndPassThroughStream(object):
def __init__(self, stream):
self._buffer = StringIO.StringIO()
self._stream = stream
def write(self, msg):
self._stream.write(msg)
# Note that we don't want to capture any output generated by the debugger
# because that could cause the results of capture_output() to be invalid.
if not self._message_is_from_pdb():
self._buffer.write(msg)
def _message_is_from_pdb(self):
# We will assume that if the pdb module is in the stack then the output
# is being generated by the python debugger (or the user calling something
# from inside the debugger).
import inspect
import pdb
stack = inspect.stack()
return any(frame[1] == pdb.__file__.replace('.pyc', '.py') for frame in stack)
def flush(self):
self._stream.flush()
def getvalue(self):
return self._buffer.getvalue()
|
loonycyborg/scons-plusplus | refs/heads/master | python_modules/Tool/rmic.py | 2 | """SCons.Tool.rmic
Tool-specific initialization for rmic.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/rmic.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import os.path
import SCons.Action
import SCons.Builder
import SCons.Node.FS
import SCons.Util
from SCons.Tool.JavaCommon import get_java_install_dirs
def emit_rmic_classes(target, source, env):
"""Create and return lists of Java RMI stub and skeleton
class files to be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
            if class_suffix and classname[-len(class_suffix):] == class_suffix:
                classname = classname[:-len(class_suffix)]
s = src.rfile()
s.attributes.java_classdir = classdir
s.attributes.java_classname = classname
slist.append(s)
stub_suffixes = ['_Stub']
if env.get('JAVAVERSION') == '1.4':
stub_suffixes.append('_Skel')
tlist = []
for s in source:
for suff in stub_suffixes:
fname = s.attributes.java_classname.replace('.', os.sep) + \
suff + class_suffix
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source
RMICAction = SCons.Action.Action('$RMICCOM', '$RMICCOMSTR')
RMICBuilder = SCons.Builder.Builder(action = RMICAction,
emitter = emit_rmic_classes,
src_suffix = '$JAVACLASSSUFFIX',
target_factory = SCons.Node.FS.Dir,
source_factory = SCons.Node.FS.File)
def generate(env):
"""Add Builders and construction variables for rmic to an Environment."""
env['BUILDERS']['RMIC'] = RMICBuilder
if env['PLATFORM'] == 'win32':
version = env.get('JAVAVERSION', None)
# Ensure that we have a proper path for rmic
paths = get_java_install_dirs('win32', version=version)
rmic = SCons.Tool.find_program_path(env, 'rmic', default_paths=paths)
# print("RMIC: %s"%rmic)
if rmic:
rmic_bin_dir = os.path.dirname(rmic)
env.AppendENVPath('PATH', rmic_bin_dir)
env['RMIC'] = 'rmic'
env['RMICFLAGS'] = SCons.Util.CLVar('')
env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class'
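# Illustrative usage sketch (not part of the tool itself; assumes env.Java()
# has already produced compiled classes under 'classes'):
#
#   env = Environment(tools=['javac', 'rmic'])
#   classes = env.Java(target='classes', source='src')
#   env.RMIC(target='stubs', source=classes)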
def exists(env):
# As reported by Jan Nijtmans in issue #2730, the simple
# return env.Detect('rmic')
# doesn't always work during initialization. For now, we
# stop trying to detect an executable (analogous to the
# javac Builder).
# TODO: Come up with a proper detect() routine...and enable it.
return 1
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
wentixiaogege/newt-2.0 | refs/heads/master | authnz/tests.py | 3 | from django.test import TestCase
from django.conf import settings
import json
from newt.tests import MyTestClient, newt_base_url, login
class AuthTests(TestCase):
fixtures = ["test_fixture.json"]
def setUp(self):
self.client = MyTestClient()
def test_login(self):
# Should not be logged in
r = self.client.get(newt_base_url + "/auth")
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], False)
# Should be logged in
r = self.client.post(newt_base_url + "/auth", data=login)
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], True)
self.assertEquals(json_response['output']['username'], login['username'])
        # Logged-in client should return user info
r = self.client.get(newt_base_url + "/auth")
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], True)
self.assertEquals(json_response['output']['username'], login['username'])
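    # Assumed response payload shape (inferred from the assertions above, not
    # from NEWT documentation):
    #   {"output": {"auth": true, "username": "<login user>"}, ...}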
def test_logout(self):
# Should be logged in
r = self.client.post(newt_base_url + "/auth", data=login)
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], True)
self.assertEquals(json_response['output']['username'], login['username'])
r = self.client.delete(newt_base_url + "/auth")
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], False)
r = self.client.get(newt_base_url + "/auth")
self.assertEquals(r.status_code, 200)
json_response = r.json()
self.assertEquals(json_response['output']['auth'], False)
|
dimdung/boto | refs/heads/develop | tests/unit/vpc/test_routetable.py | 64 | from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, RouteTable
class TestDescribeRouteTables(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DescribeRouteTablesResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>6f570b0b-9c18-4b07-bdec-73740dcf861a</requestId>
<routeTableSet>
<item>
<routeTableId>rtb-13ad487a</routeTableId>
<vpcId>vpc-11ad4878</vpcId>
<routeSet>
<item>
<destinationCidrBlock>10.0.0.0/22</destinationCidrBlock>
<gatewayId>local</gatewayId>
<state>active</state>
<origin>CreateRouteTable</origin>
</item>
</routeSet>
<associationSet>
<item>
<routeTableAssociationId>rtbassoc-12ad487b</routeTableAssociationId>
<routeTableId>rtb-13ad487a</routeTableId>
<main>true</main>
</item>
</associationSet>
<tagSet/>
</item>
<item>
<routeTableId>rtb-f9ad4890</routeTableId>
<vpcId>vpc-11ad4878</vpcId>
<routeSet>
<item>
<destinationCidrBlock>10.0.0.0/22</destinationCidrBlock>
<gatewayId>local</gatewayId>
<state>active</state>
<origin>CreateRouteTable</origin>
</item>
<item>
<destinationCidrBlock>0.0.0.0/0</destinationCidrBlock>
<gatewayId>igw-eaad4883</gatewayId>
<state>active</state>
</item>
<item>
<destinationCidrBlock>10.0.0.0/21</destinationCidrBlock>
<networkInterfaceId>eni-884ec1d1</networkInterfaceId>
<state>blackhole</state>
<origin>CreateRoute</origin>
</item>
<item>
<destinationCidrBlock>11.0.0.0/22</destinationCidrBlock>
<vpcPeeringConnectionId>pcx-efc52b86</vpcPeeringConnectionId>
<state>blackhole</state>
<origin>CreateRoute</origin>
</item>
</routeSet>
<associationSet>
<item>
<routeTableAssociationId>rtbassoc-faad4893</routeTableAssociationId>
<routeTableId>rtb-f9ad4890</routeTableId>
<subnetId>subnet-15ad487c</subnetId>
</item>
</associationSet>
<tagSet/>
</item>
</routeTableSet>
</DescribeRouteTablesResponse>
"""
def test_get_all_route_tables(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.get_all_route_tables(
['rtb-13ad487a', 'rtb-f9ad4890'], filters=[('route.state', 'active')])
self.assert_request_parameters({
'Action': 'DescribeRouteTables',
'RouteTableId.1': 'rtb-13ad487a',
'RouteTableId.2': 'rtb-f9ad4890',
'Filter.1.Name': 'route.state',
'Filter.1.Value.1': 'active'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(len(api_response), 2)
self.assertIsInstance(api_response[0], RouteTable)
self.assertEquals(api_response[0].id, 'rtb-13ad487a')
self.assertEquals(len(api_response[0].routes), 1)
self.assertEquals(api_response[0].routes[0].destination_cidr_block, '10.0.0.0/22')
self.assertEquals(api_response[0].routes[0].gateway_id, 'local')
self.assertEquals(api_response[0].routes[0].state, 'active')
self.assertEquals(len(api_response[0].associations), 1)
self.assertEquals(api_response[0].associations[0].id, 'rtbassoc-12ad487b')
self.assertEquals(api_response[0].associations[0].route_table_id, 'rtb-13ad487a')
self.assertIsNone(api_response[0].associations[0].subnet_id)
self.assertEquals(api_response[0].associations[0].main, True)
self.assertEquals(api_response[1].id, 'rtb-f9ad4890')
self.assertEquals(len(api_response[1].routes), 4)
self.assertEquals(api_response[1].routes[0].destination_cidr_block, '10.0.0.0/22')
self.assertEquals(api_response[1].routes[0].gateway_id, 'local')
self.assertEquals(api_response[1].routes[0].state, 'active')
self.assertEquals(api_response[1].routes[1].destination_cidr_block, '0.0.0.0/0')
self.assertEquals(api_response[1].routes[1].gateway_id, 'igw-eaad4883')
self.assertEquals(api_response[1].routes[1].state, 'active')
self.assertEquals(api_response[1].routes[2].destination_cidr_block, '10.0.0.0/21')
self.assertEquals(api_response[1].routes[2].interface_id, 'eni-884ec1d1')
self.assertEquals(api_response[1].routes[2].state, 'blackhole')
self.assertEquals(api_response[1].routes[3].destination_cidr_block, '11.0.0.0/22')
self.assertEquals(api_response[1].routes[3].vpc_peering_connection_id, 'pcx-efc52b86')
self.assertEquals(api_response[1].routes[3].state, 'blackhole')
self.assertEquals(len(api_response[1].associations), 1)
self.assertEquals(api_response[1].associations[0].id, 'rtbassoc-faad4893')
self.assertEquals(api_response[1].associations[0].route_table_id, 'rtb-f9ad4890')
self.assertEquals(api_response[1].associations[0].subnet_id, 'subnet-15ad487c')
self.assertEquals(api_response[1].associations[0].main, False)
class TestAssociateRouteTable(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<AssociateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<associationId>rtbassoc-f8ad4891</associationId>
</AssociateRouteTableResponse>
"""
def test_associate_route_table(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.associate_route_table(
'rtb-e4ad488d', 'subnet-15ad487c')
self.assert_request_parameters({
'Action': 'AssociateRouteTable',
'RouteTableId': 'rtb-e4ad488d',
'SubnetId': 'subnet-15ad487c'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, 'rtbassoc-f8ad4891')
class TestDisassociateRouteTable(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DisassociateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DisassociateRouteTableResponse>
"""
def test_disassociate_route_table(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.disassociate_route_table('rtbassoc-fdad4894')
self.assert_request_parameters({
'Action': 'DisassociateRouteTable',
'AssociationId': 'rtbassoc-fdad4894'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestCreateRouteTable(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<CreateRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<routeTable>
<routeTableId>rtb-f9ad4890</routeTableId>
<vpcId>vpc-11ad4878</vpcId>
<routeSet>
<item>
<destinationCidrBlock>10.0.0.0/22</destinationCidrBlock>
<gatewayId>local</gatewayId>
<state>active</state>
</item>
</routeSet>
<associationSet/>
<tagSet/>
</routeTable>
</CreateRouteTableResponse>
"""
def test_create_route_table(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_route_table('vpc-11ad4878')
self.assert_request_parameters({
'Action': 'CreateRouteTable',
'VpcId': 'vpc-11ad4878'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertIsInstance(api_response, RouteTable)
self.assertEquals(api_response.id, 'rtb-f9ad4890')
self.assertEquals(len(api_response.routes), 1)
self.assertEquals(api_response.routes[0].destination_cidr_block, '10.0.0.0/22')
self.assertEquals(api_response.routes[0].gateway_id, 'local')
self.assertEquals(api_response.routes[0].state, 'active')
class TestDeleteRouteTable(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DeleteRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteRouteTableResponse>
"""
def test_delete_route_table(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_route_table('rtb-e4ad488d')
self.assert_request_parameters({
'Action': 'DeleteRouteTable',
'RouteTableId': 'rtb-e4ad488d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestReplaceRouteTableAssociation(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<ReplaceRouteTableAssociationResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<newAssociationId>rtbassoc-faad4893</newAssociationId>
</ReplaceRouteTableAssociationResponse>
"""
def test_replace_route_table_assocation(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route_table_assocation(
'rtbassoc-faad4893', 'rtb-f9ad4890')
self.assert_request_parameters({
'Action': 'ReplaceRouteTableAssociation',
'AssociationId': 'rtbassoc-faad4893',
'RouteTableId': 'rtb-f9ad4890'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_replace_route_table_association_with_assoc(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route_table_association_with_assoc(
'rtbassoc-faad4893', 'rtb-f9ad4890')
self.assert_request_parameters({
'Action': 'ReplaceRouteTableAssociation',
'AssociationId': 'rtbassoc-faad4893',
'RouteTableId': 'rtb-f9ad4890'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, 'rtbassoc-faad4893')
class TestCreateRoute(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<CreateRouteResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</CreateRouteResponse>
"""
def test_create_route_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_route(
'rtb-e4ad488d', '0.0.0.0/0', gateway_id='igw-eaad4883')
self.assert_request_parameters({
'Action': 'CreateRoute',
'RouteTableId': 'rtb-e4ad488d',
'DestinationCidrBlock': '0.0.0.0/0',
'GatewayId': 'igw-eaad4883'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_create_route_instance(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_route(
'rtb-g8ff4ea2', '0.0.0.0/0', instance_id='i-1a2b3c4d')
self.assert_request_parameters({
'Action': 'CreateRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'InstanceId': 'i-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_create_route_interface(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_route(
'rtb-g8ff4ea2', '0.0.0.0/0', interface_id='eni-1a2b3c4d')
self.assert_request_parameters({
'Action': 'CreateRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'NetworkInterfaceId': 'eni-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_create_route_vpc_peering_connection(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_route(
'rtb-g8ff4ea2', '0.0.0.0/0', vpc_peering_connection_id='pcx-1a2b3c4d')
self.assert_request_parameters({
'Action': 'CreateRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'VpcPeeringConnectionId': 'pcx-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestReplaceRoute(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<CreateRouteResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</CreateRouteResponse>
"""
def test_replace_route_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route(
'rtb-e4ad488d', '0.0.0.0/0', gateway_id='igw-eaad4883')
self.assert_request_parameters({
'Action': 'ReplaceRoute',
'RouteTableId': 'rtb-e4ad488d',
'DestinationCidrBlock': '0.0.0.0/0',
'GatewayId': 'igw-eaad4883'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_replace_route_instance(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route(
'rtb-g8ff4ea2', '0.0.0.0/0', instance_id='i-1a2b3c4d')
self.assert_request_parameters({
'Action': 'ReplaceRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'InstanceId': 'i-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_replace_route_interface(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route(
'rtb-g8ff4ea2', '0.0.0.0/0', interface_id='eni-1a2b3c4d')
self.assert_request_parameters({
'Action': 'ReplaceRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'NetworkInterfaceId': 'eni-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
def test_replace_route_vpc_peering_connection(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.replace_route(
'rtb-g8ff4ea2', '0.0.0.0/0', vpc_peering_connection_id='pcx-1a2b3c4d')
self.assert_request_parameters({
'Action': 'ReplaceRoute',
'RouteTableId': 'rtb-g8ff4ea2',
'DestinationCidrBlock': '0.0.0.0/0',
'VpcPeeringConnectionId': 'pcx-1a2b3c4d'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestDeleteRoute(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DeleteRouteTableResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteRouteTableResponse>
"""
def test_delete_route(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_route('rtb-e4ad488d', '172.16.1.0/24')
self.assert_request_parameters({
'Action': 'DeleteRoute',
'RouteTableId': 'rtb-e4ad488d',
'DestinationCidrBlock': '172.16.1.0/24'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
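# Illustrative live-usage sketch mirroring the mocked calls above (assumes
# real AWS credentials; the IDs are placeholders):
#
#   from boto.vpc import VPCConnection
#   conn = VPCConnection()
#   rt = conn.create_route_table('vpc-11ad4878')
#   conn.create_route(rt.id, '0.0.0.0/0', gateway_id='igw-eaad4883')
#   conn.delete_route_table(rt.id)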
if __name__ == '__main__':
unittest.main()
|
abhikeshav/ydk-py | refs/heads/master | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_pbr_oper.py | 1 |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'PolicyStateEnum' : _MetaInfoEnum('PolicyStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper',
{
'active':'ACTIVE',
'suspended':'SUSPENDED',
}, 'Cisco-IOS-XR-pbr-oper', _yang_ns._namespaces['Cisco-IOS-XR-pbr-oper']),
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats',
False,
[
_MetaInfoClassMember('match-data-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Incoming matched data rate in kbps
''',
'match_data_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('pre-policy-matched-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Matched bytes before applying policy
''',
'pre_policy_matched_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('pre-policy-matched-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Matched pkts before applying policy
''',
'pre_policy_matched_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped bytes (packets/bytes)
''',
'total_drop_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped packets (packets/bytes)
''',
'total_drop_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Total drop rate (packets/bytes)
''',
'total_drop_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-transmit-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Total transmit rate in kbps
''',
'total_transmit_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('transmit-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Transmitted bytes (packets/bytes)
''',
'transmit_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('transmit-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Transmitted packets (packets/bytes)
''',
'transmit_packets',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'general-stats',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats',
False,
[
_MetaInfoClassMember('drop-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped bytes
''',
'drop_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('drop-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped packets
''',
'drop_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('resp-sent-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of Bytes HTTPR response sent
''',
'resp_sent_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('resp-sent-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of pkts HTTPR response sent
''',
'resp_sent_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('rqst-rcvd-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of Bytes HTTP request received
''',
'rqst_rcvd_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('rqst-rcvd-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of pkts HTTP request received
''',
'rqst_rcvd_packets',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'httpr-stats',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat',
False,
[
_MetaInfoClassMember('class-id', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' ClassId
''',
'class_id',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('class-name', ATTRIBUTE, 'str' , None, None,
[(0, 65)], [],
''' ClassName
''',
'class_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('counter-validity-bitmask', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Bitmask to indicate which counter or counters
are undetermined. Counters will be marked
undetermined when one or more classes share
queues with class-default because in such cases
the value of counters for each class is invalid.
Based on the flag(s) set, the following counters
will be marked undetermined. For example, if
value of this object returned is 0x00000101,
counters
TransmitPackets/TransmitBytes/TotalTransmitRate
and DropPackets/DropBytes are undetermined
.0x00000001 - Transmit
(TransmitPackets/TransmitBytes/TotalTransmitRate
), 0x00000002 - Drop
(TotalDropPackets/TotalDropBytes/TotalDropRate),
0x00000004 - Httpr
(HttprTransmitPackets/HttprTransmitBytes),
''',
'counter_validity_bitmask',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('general-stats', REFERENCE_CLASS, 'GeneralStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats',
[], [],
''' general stats
''',
'general_stats',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('httpr-stats', REFERENCE_CLASS, 'HttprStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats',
[], [],
''' HTTPR stats
''',
'httpr_stats',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'class-stat',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input',
False,
[
_MetaInfoClassMember('class-stat', REFERENCE_LIST, 'ClassStat' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat',
[], [],
''' Array of classes contained in policy
''',
'class_stat',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[(0, 42)], [],
''' NodeName
''',
'node_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('policy-name', ATTRIBUTE, 'str' , None, None,
[(0, 65)], [],
''' PolicyName
''',
'policy_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('state', REFERENCE_ENUM_CLASS, 'PolicyStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'PolicyStateEnum',
[], [],
''' State
''',
'state',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('state-description', ATTRIBUTE, 'str' , None, None,
[(0, 128)], [],
''' StateDescription
''',
'state_description',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'input',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction',
False,
[
_MetaInfoClassMember('input', REFERENCE_CLASS, 'Input' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input',
[], [],
''' PBR policy statistics
''',
'input',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'direction',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3}\\d+)|(([a-zA-Z0-9_]*\\d+/){4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the interface
''',
'interface_name',
'Cisco-IOS-XR-pbr-oper', True),
_MetaInfoClassMember('direction', REFERENCE_CLASS, 'Direction' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction',
[], [],
''' PBR direction
''',
'direction',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface',
[], [],
''' PBR action data for a particular interface
''',
'interface',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap',
False,
[
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces',
[], [],
''' Operational data for all interfaces
''',
'interfaces',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'policy-map',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node',
False,
[
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[], ['([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'],
''' The node
''',
'node_name',
'Cisco-IOS-XR-pbr-oper', True),
_MetaInfoClassMember('policy-map', REFERENCE_CLASS, 'PolicyMap' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap',
[], [],
''' Operational data for policymaps
''',
'policy_map',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'node',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes',
False,
[
_MetaInfoClassMember('node', REFERENCE_LIST, 'Node' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node',
[], [],
''' PBR operational data for a particular node
''',
'node',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'nodes',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr' : {
'meta_info' : _MetaInfoClass('Pbr',
False,
[
_MetaInfoClassMember('nodes', REFERENCE_CLASS, 'Nodes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes',
[], [],
''' Node-specific PBR operational data
''',
'nodes',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'pbr',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
}
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap']['meta_info'].parent =_meta_table['Pbr.Nodes.Node']['meta_info']
_meta_table['Pbr.Nodes.Node']['meta_info'].parent =_meta_table['Pbr.Nodes']['meta_info']
_meta_table['Pbr.Nodes']['meta_info'].parent =_meta_table['Pbr']['meta_info']
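# Illustrative lookup sketch (an assumption about how callers consume this
# generated table, not part of the module itself): metadata is keyed by the
# fully-qualified Python class path, e.g.
#   meta = _meta_table['Pbr.Nodes.Node.PolicyMap']['meta_info']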
|
Distrotech/scons | refs/heads/distrotech-scons | test/DVIPS/PSCOMSTR.py | 5 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that the $PSCOMSTR construction variable allows you to customize
the displayed string when is called.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.write('myps.py', """
import sys
outfile = open(sys.argv[1], 'wb')
for f in sys.argv[2:]:
infile = open(f, 'rb')
for l in [l for l in infile.readlines() if l != '/*ps*/\\n']:
outfile.write(l)
sys.exit(0)
""")
test.write('SConstruct', """
env = Environment(tools=['default', 'dvips'],
PSCOM = r'%(_python_)s myps.py $TARGET $SOURCES',
PSCOMSTR = 'PostScripting $TARGET from $SOURCE')
env.PostScript(target = 'aaa', source = 'aaa.dvi')
""" % locals())
test.write('aaa.dvi', "aaa.dvi\n/*ps*/\n")
test.run(stdout = test.wrap_stdout("""\
PostScripting aaa.ps from aaa.dvi
"""))
test.must_match('aaa.ps', "aaa.dvi\n")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
akashlevy/Lyff | refs/heads/master | lyff_lambda/boto/__init__.py | 5 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# Copyright (c) 2010, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.pyami.config import Config, BotoConfigLocations
from boto.storage_uri import BucketStorageUri, FileStorageUri
import boto.plugin
import datetime
import os
import platform
import re
import sys
import logging
import logging.config
from boto.compat import urlparse
from boto.exception import InvalidUriError
__version__ = '2.48.0'
Version = __version__ # for backward compatibility
# http://bugs.python.org/issue7980
datetime.datetime.strptime('', '')
UserAgent = 'Boto/%s Python/%s %s/%s' % (
__version__,
platform.python_version(),
platform.system(),
platform.release()
)
config = Config()
# Regex to disallow bucket names that violate the allowed charset or are not
# 3..255 chars long in total.
BUCKET_NAME_RE = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9\._-]{1,253}[a-zA-Z0-9]$')
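# Illustrative check (sketch): BUCKET_NAME_RE.match('my-bucket.1') succeeds,
# while 'ab' (shorter than 3 chars) and 'my bucket' (illegal space) do not.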
# Regex to disallow buckets with individual DNS labels longer than 63.
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}')
GENERATION_RE = re.compile(r'(?P<versionless_uri_str>.+)'
r'#(?P<generation>[0-9]+)$')
VERSION_RE = re.compile('(?P<versionless_uri_str>.+)#(?P<version_id>.+)$')
ENDPOINTS_PATH = os.path.join(os.path.dirname(__file__), 'endpoints.json')
def init_logging():
for file in BotoConfigLocations:
try:
logging.config.fileConfig(os.path.expanduser(file))
except:
pass
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger('boto')
perflog = logging.getLogger('boto.perf')
log.addHandler(NullHandler())
perflog.addHandler(NullHandler())
init_logging()
# convenience function to set logging to a particular file
def set_file_logger(name, filepath, level=logging.INFO, format_string=None):
global log
if not format_string:
format_string = "%(asctime)s %(name)s [%(levelname)s]:%(message)s"
logger = logging.getLogger(name)
logger.setLevel(level)
fh = logging.FileHandler(filepath)
fh.setLevel(level)
formatter = logging.Formatter(format_string)
fh.setFormatter(formatter)
logger.addHandler(fh)
log = logger
def set_stream_logger(name, level=logging.DEBUG, format_string=None):
global log
if not format_string:
format_string = "%(asctime)s %(name)s [%(levelname)s]:%(message)s"
logger = logging.getLogger(name)
logger.setLevel(level)
fh = logging.StreamHandler()
fh.setLevel(level)
formatter = logging.Formatter(format_string)
fh.setFormatter(formatter)
logger.addHandler(fh)
log = logger
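# Example usage sketch (assumed call sites for the two helpers above):
#   import boto
#   boto.set_stream_logger('boto')            # DEBUG output to stderr
#   boto.set_file_logger('boto', 'boto.log')  # or append to a file instead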
def connect_sqs(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.sqs.connection.SQSConnection`
:return: A connection to Amazon's SQS
"""
from boto.sqs.connection import SQSConnection
return SQSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.s3.connection.S3Connection`
:return: A connection to Amazon's S3
"""
from boto.s3.connection import S3Connection
return S3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
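# Illustrative usage sketch (assumes credentials are available via the
# environment or a boto config file):
#   import boto
#   conn = boto.connect_s3()
#   for bucket in conn.get_all_buckets():
#       print(bucket.name)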
def connect_gs(gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
"""
@type gs_access_key_id: string
@param gs_access_key_id: Your Google Cloud Storage Access Key ID
@type gs_secret_access_key: string
@param gs_secret_access_key: Your Google Cloud Storage Secret Access Key
@rtype: L{GSConnection<boto.gs.connection.GSConnection>}
@return: A connection to Google's Storage service
"""
from boto.gs.connection import GSConnection
return GSConnection(gs_access_key_id, gs_secret_access_key, **kwargs)
def connect_ec2(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.connection.EC2Connection`
:return: A connection to Amazon's EC2
"""
from boto.ec2.connection import EC2Connection
return EC2Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_elb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.elb.ELBConnection`
:return: A connection to Amazon's Load Balancing Service
"""
from boto.ec2.elb import ELBConnection
return ELBConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_autoscale(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.autoscale.AutoScaleConnection`
:return: A connection to Amazon's Auto Scaling Service
    :type use_block_device_types: bool
    :param use_block_device_types: Specifies whether described launch configs
        are returned with block device mappings that contain block device
        types, or with a list of old-style block device mappings (deprecated).
        This defaults to False for compatibility with the old, incorrect
        style.
"""
from boto.ec2.autoscale import AutoScaleConnection
return AutoScaleConnection(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_cloudwatch(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.cloudwatch.CloudWatchConnection`
:return: A connection to Amazon's EC2 Monitoring service
"""
from boto.ec2.cloudwatch import CloudWatchConnection
return CloudWatchConnection(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_sdb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.sdb.connection.SDBConnection`
:return: A connection to Amazon's SDB
"""
from boto.sdb.connection import SDBConnection
return SDBConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_fps(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.fps.connection.FPSConnection`
:return: A connection to FPS
"""
from boto.fps.connection import FPSConnection
return FPSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_mturk(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.mturk.connection.MTurkConnection`
:return: A connection to MTurk
"""
from boto.mturk.connection import MTurkConnection
return MTurkConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_cloudfront(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.cloudfront.CloudFrontConnection`
    :return: A connection to Amazon's CloudFront Service
"""
from boto.cloudfront import CloudFrontConnection
return CloudFrontConnection(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_vpc(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.vpc.VPCConnection`
:return: A connection to VPC
"""
from boto.vpc import VPCConnection
return VPCConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_rds(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.rds.RDSConnection`
:return: A connection to RDS
"""
from boto.rds import RDSConnection
return RDSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_rds2(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.rds2.layer1.RDSConnection`
:return: A connection to RDS
"""
from boto.rds2.layer1 import RDSConnection
return RDSConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_emr(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.emr.EmrConnection`
    :return: A connection to Elastic MapReduce
"""
from boto.emr import EmrConnection
return EmrConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_sns(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.sns.SNSConnection`
:return: A connection to Amazon's SNS
"""
from boto.sns import SNSConnection
return SNSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_iam(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.iam.IAMConnection`
:return: A connection to Amazon's IAM
"""
from boto.iam import IAMConnection
return IAMConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_route53(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.dns.Route53Connection`
:return: A connection to Amazon's Route53 DNS Service
"""
from boto.route53 import Route53Connection
return Route53Connection(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_cloudformation(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.cloudformation.CloudFormationConnection`
:return: A connection to Amazon's CloudFormation Service
"""
from boto.cloudformation import CloudFormationConnection
return CloudFormationConnection(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_euca(host=None, aws_access_key_id=None, aws_secret_access_key=None,
port=8773, path='/services/Eucalyptus', is_secure=False,
**kwargs):
"""
Connect to a Eucalyptus service.
:type host: string
:param host: the host name or ip address of the Eucalyptus server
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.connection.EC2Connection`
:return: A connection to Eucalyptus server
"""
from boto.ec2 import EC2Connection
from boto.ec2.regioninfo import RegionInfo
# Check for values in boto config, if not supplied as args
if not aws_access_key_id:
aws_access_key_id = config.get('Credentials',
'euca_access_key_id',
None)
if not aws_secret_access_key:
aws_secret_access_key = config.get('Credentials',
'euca_secret_access_key',
None)
if not host:
host = config.get('Boto', 'eucalyptus_host', None)
reg = RegionInfo(name='eucalyptus', endpoint=host)
return EC2Connection(aws_access_key_id, aws_secret_access_key,
region=reg, port=port, path=path,
is_secure=is_secure, **kwargs)
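# e.g. (sketch): connect_euca('euca.example.com', key_id, secret), or simply
# connect_euca('euca.example.com') when euca_access_key_id and
# euca_secret_access_key are set in the boto config, as handled above.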
def connect_glacier(aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.glacier.layer2.Layer2`
:return: A connection to Amazon's Glacier Service
"""
from boto.glacier.layer2 import Layer2
return Layer2(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_ec2_endpoint(url, aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
    Connect to an EC2 API endpoint. Additional arguments are passed
through to connect_ec2.
:type url: string
:param url: A url for the ec2 api endpoint to connect to
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to the given EC2 endpoint
"""
from boto.ec2.regioninfo import RegionInfo
purl = urlparse(url)
kwargs['port'] = purl.port
kwargs['host'] = purl.hostname
kwargs['path'] = purl.path
    if 'is_secure' not in kwargs:
kwargs['is_secure'] = (purl.scheme == "https")
kwargs['region'] = RegionInfo(name=purl.hostname,
endpoint=purl.hostname)
kwargs['aws_access_key_id'] = aws_access_key_id
kwargs['aws_secret_access_key'] = aws_secret_access_key
    return connect_ec2(**kwargs)
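# e.g. (sketch, placeholder URL):
#   conn = connect_ec2_endpoint(
#       'https://ec2.example.com:8773/services/Eucalyptus',
#       aws_access_key_id, aws_secret_access_key)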
def connect_walrus(host=None, aws_access_key_id=None,
aws_secret_access_key=None,
port=8773, path='/services/Walrus', is_secure=False,
**kwargs):
"""
Connect to a Walrus service.
:type host: string
:param host: the host name or ip address of the Walrus server
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.s3.connection.S3Connection`
:return: A connection to Walrus
"""
from boto.s3.connection import S3Connection
from boto.s3.connection import OrdinaryCallingFormat
# Check for values in boto config, if not supplied as args
if not aws_access_key_id:
aws_access_key_id = config.get('Credentials',
'euca_access_key_id',
None)
if not aws_secret_access_key:
aws_secret_access_key = config.get('Credentials',
'euca_secret_access_key',
None)
if not host:
host = config.get('Boto', 'walrus_host', None)
return S3Connection(aws_access_key_id, aws_secret_access_key,
host=host, port=port, path=path,
calling_format=OrdinaryCallingFormat(),
is_secure=is_secure, **kwargs)
def connect_ses(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.ses.SESConnection`
:return: A connection to Amazon's SES
"""
from boto.ses import SESConnection
return SESConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_sts(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.sts.STSConnection`
:return: A connection to Amazon's STS
"""
from boto.sts import STSConnection
return STSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
is_secure=False, **kwargs):
"""
Connect to the Internet Archive via their S3-like API.
:type ia_access_key_id: string
:param ia_access_key_id: Your IA Access Key ID. This will also look
in your boto config file for an entry in the Credentials
section called "ia_access_key_id"
:type ia_secret_access_key: string
:param ia_secret_access_key: Your IA Secret Access Key. This will also
look in your boto config file for an entry in the Credentials
section called "ia_secret_access_key"
:rtype: :class:`boto.s3.connection.S3Connection`
:return: A connection to the Internet Archive
"""
from boto.s3.connection import S3Connection
from boto.s3.connection import OrdinaryCallingFormat
access_key = config.get('Credentials', 'ia_access_key_id',
ia_access_key_id)
secret_key = config.get('Credentials', 'ia_secret_access_key',
ia_secret_access_key)
return S3Connection(access_key, secret_key,
host='s3.us.archive.org',
calling_format=OrdinaryCallingFormat(),
is_secure=is_secure, **kwargs)
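# Note on connect_ia (illustrative, not part of the original module): the
# boto config entries "ia_access_key_id" / "ia_secret_access_key" take
# precedence, and the explicit arguments act as fallbacks when those entries
# are absent.
#
#   ia = connect_ia()  # uses s3.us.archive.org over plain HTTP by default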
def connect_dynamodb(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.dynamodb.layer2.Layer2`
:return: A connection to the Layer2 interface for DynamoDB.
"""
from boto.dynamodb.layer2 import Layer2
return Layer2(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_swf(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.swf.layer1.Layer1`
:return: A connection to the Layer1 interface for SWF.
"""
from boto.swf.layer1 import Layer1
return Layer1(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_cloudsearch(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.cloudsearch.layer2.Layer2`
:return: A connection to Amazon's CloudSearch service
"""
from boto.cloudsearch.layer2 import Layer2
return Layer2(aws_access_key_id, aws_secret_access_key,
**kwargs)
def connect_cloudsearch2(aws_access_key_id=None,
aws_secret_access_key=None,
sign_request=False,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:type sign_request: bool
:param sign_request: whether or not to sign search and
upload requests
:rtype: :class:`boto.cloudsearch2.layer2.Layer2`
:return: A connection to Amazon's CloudSearch2 service
"""
from boto.cloudsearch2.layer2 import Layer2
return Layer2(aws_access_key_id, aws_secret_access_key,
sign_request=sign_request,
**kwargs)
def connect_cloudsearchdomain(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.cloudsearchdomain.layer1.CloudSearchDomainConnection`
:return: A connection to Amazon's CloudSearch Domain service
"""
from boto.cloudsearchdomain.layer1 import CloudSearchDomainConnection
return CloudSearchDomainConnection(aws_access_key_id,
aws_secret_access_key, **kwargs)
def connect_beanstalk(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.beanstalk.layer1.Layer1`
:return: A connection to Amazon's Elastic Beanstalk service
"""
from boto.beanstalk.layer1 import Layer1
return Layer1(aws_access_key_id, aws_secret_access_key, **kwargs)
def connect_elastictranscoder(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.elastictranscoder.layer1.ElasticTranscoderConnection`
:return: A connection to Amazon's Elastic Transcoder service
"""
from boto.elastictranscoder.layer1 import ElasticTranscoderConnection
return ElasticTranscoderConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs)
def connect_opsworks(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
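    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID
    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.opsworks.layer1.OpsWorksConnection`
    :return: A connection to Amazon's OpsWorks service
    """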
from boto.opsworks.layer1 import OpsWorksConnection
return OpsWorksConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs)
def connect_redshift(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.redshift.layer1.RedshiftConnection`
:return: A connection to Amazon's Redshift service
"""
from boto.redshift.layer1 import RedshiftConnection
return RedshiftConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_support(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.support.layer1.SupportConnection`
:return: A connection to Amazon's Support service
"""
from boto.support.layer1 import SupportConnection
return SupportConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_cloudtrail(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS CloudTrail
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.cloudtrail.layer1.CloudTrailConnection`
    :return: A connection to the AWS CloudTrail service
"""
from boto.cloudtrail.layer1 import CloudTrailConnection
return CloudTrailConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_directconnect(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS DirectConnect
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.directconnect.layer1.DirectConnectConnection`
:return: A connection to the AWS DirectConnect service
"""
from boto.directconnect.layer1 import DirectConnectConnection
return DirectConnectConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_kinesis(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon Kinesis
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.kinesis.layer1.KinesisConnection`
:return: A connection to the Amazon Kinesis service
"""
from boto.kinesis.layer1 import KinesisConnection
return KinesisConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_logs(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon CloudWatch Logs
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.logs.layer1.CloudWatchLogsConnection`
:return: A connection to the Amazon CloudWatch Logs service
"""
from boto.logs.layer1 import CloudWatchLogsConnection
return CloudWatchLogsConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_route53domains(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon Route 53 Domains
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.route53.domains.layer1.Route53DomainsConnection`
:return: A connection to the Amazon Route 53 Domains service
"""
from boto.route53.domains.layer1 import Route53DomainsConnection
return Route53DomainsConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_cognito_identity(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon Cognito Identity
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.cognito.identity.layer1.CognitoIdentityConnection`
:return: A connection to the Amazon Cognito Identity service
"""
from boto.cognito.identity.layer1 import CognitoIdentityConnection
return CognitoIdentityConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_cognito_sync(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon Cognito Sync
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.cognito.sync.layer1.CognitoSyncConnection`
:return: A connection to the Amazon Cognito Sync service
"""
from boto.cognito.sync.layer1 import CognitoSyncConnection
return CognitoSyncConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_kms(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS Key Management Service
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.kms.layer1.KMSConnection`
:return: A connection to the AWS Key Management Service
"""
from boto.kms.layer1 import KMSConnection
return KMSConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_awslambda(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS Lambda
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.awslambda.layer1.AWSLambdaConnection`
:return: A connection to the AWS Lambda service
"""
from boto.awslambda.layer1 import AWSLambdaConnection
return AWSLambdaConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_codedeploy(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS CodeDeploy
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.codedeploy.layer1.CodeDeployConnection`
:return: A connection to the AWS CodeDeploy service
"""
from boto.codedeploy.layer1 import CodeDeployConnection
return CodeDeployConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_configservice(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS Config
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.configservice.layer1.ConfigServiceConnection`
:return: A connection to the AWS Config service
"""
from boto.configservice.layer1 import ConfigServiceConnection
return ConfigServiceConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_cloudhsm(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to AWS CloudHSM
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
    :rtype: :class:`boto.cloudhsm.layer1.CloudHSMConnection`
:return: A connection to the AWS CloudHSM service
"""
from boto.cloudhsm.layer1 import CloudHSMConnection
return CloudHSMConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_ec2containerservice(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon EC2 Container Service
    :rtype: :class:`boto.ec2containerservice.layer1.EC2ContainerServiceConnection`
:return: A connection to the Amazon EC2 Container Service
"""
from boto.ec2containerservice.layer1 import EC2ContainerServiceConnection
return EC2ContainerServiceConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def connect_machinelearning(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
"""
Connect to Amazon Machine Learning service
    :rtype: :class:`boto.machinelearning.layer1.MachineLearningConnection`
:return: A connection to the Amazon Machine Learning service
"""
from boto.machinelearning.layer1 import MachineLearningConnection
return MachineLearningConnection(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
**kwargs
)
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
bucket_storage_uri_class=BucketStorageUri,
suppress_consec_slashes=True, is_latest=False):
"""
Instantiate a StorageUri from a URI string.
:type uri_str: string
:param uri_str: URI naming bucket + optional object.
:type default_scheme: string
:param default_scheme: default scheme for scheme-less URIs.
:type debug: int
:param debug: debug level to pass in to boto connection (range 0..2).
:type validate: bool
:param validate: whether to check for bucket name validity.
:type bucket_storage_uri_class: BucketStorageUri interface.
:param bucket_storage_uri_class: Allows mocking for unit tests.
:param suppress_consec_slashes: If provided, controls whether
consecutive slashes will be suppressed in key paths.
:type is_latest: bool
:param is_latest: whether this versioned object represents the
current version.
    We allow validate to be disabled so the caller can implement
    bucket-level wildcarding (outside the boto library;
    see gsutil).
:rtype: :class:`boto.StorageUri` subclass
:return: StorageUri subclass for given URI.
``uri_str`` must be one of the following formats:
* gs://bucket/name
* gs://bucket/name#ver
* s3://bucket/name
* gs://bucket
* s3://bucket
* filename (which could be a Unix path like /a/b/c or a Windows path like
C:\a\b\c)
The last example uses the default scheme ('file', unless overridden).
"""
version_id = None
generation = None
# Manually parse URI components instead of using urlparse because
# what we're calling URIs don't really fit the standard syntax for URIs
# (the latter includes an optional host/net location part).
end_scheme_idx = uri_str.find('://')
if end_scheme_idx == -1:
scheme = default_scheme.lower()
path = uri_str
else:
scheme = uri_str[0:end_scheme_idx].lower()
path = uri_str[end_scheme_idx + 3:]
if scheme not in ['file', 's3', 'gs']:
raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
if scheme == 'file':
# For file URIs we have no bucket name, and use the complete path
# (minus 'file://') as the object name.
is_stream = False
if path == '-':
is_stream = True
return FileStorageUri(path, debug, is_stream)
else:
path_parts = path.split('/', 1)
bucket_name = path_parts[0]
object_name = ''
# If validate enabled, ensure the bucket name is valid, to avoid
# possibly confusing other parts of the code. (For example if we didn't
# catch bucket names containing ':', when a user tried to connect to
# the server with that name they might get a confusing error about
# non-integer port numbers.)
if (validate and bucket_name and
(not BUCKET_NAME_RE.match(bucket_name)
or TOO_LONG_DNS_NAME_COMP.search(bucket_name))):
raise InvalidUriError('Invalid bucket name in URI "%s"' % uri_str)
if scheme == 'gs':
match = GENERATION_RE.search(path)
if match:
md = match.groupdict()
versionless_uri_str = md['versionless_uri_str']
path_parts = versionless_uri_str.split('/', 1)
generation = int(md['generation'])
elif scheme == 's3':
match = VERSION_RE.search(path)
if match:
md = match.groupdict()
versionless_uri_str = md['versionless_uri_str']
path_parts = versionless_uri_str.split('/', 1)
version_id = md['version_id']
else:
raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
if len(path_parts) > 1:
object_name = path_parts[1]
return bucket_storage_uri_class(
scheme, bucket_name, object_name, debug,
suppress_consec_slashes=suppress_consec_slashes,
version_id=version_id, generation=generation, is_latest=is_latest)
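# Illustrative results (not part of the original module; the exact parsing of
# versions depends on GENERATION_RE/VERSION_RE defined earlier in this file):
#
#   storage_uri('gs://mybucket/obj')    # scheme 'gs', bucket 'mybucket', object 'obj'
#   storage_uri('gs://mybucket/obj#3')  # as above, with generation=3 parsed out
#   storage_uri('/tmp/data.txt')        # no '://', so the default 'file' scheme
#                                       # applies and a FileStorageUri is returned
#   storage_uri('-')                    # 'file' scheme, treated as a stream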
def storage_uri_for_key(key):
"""Returns a StorageUri for the given key.
:type key: :class:`boto.s3.key.Key` or subclass
:param key: URI naming bucket + optional object.
"""
if not isinstance(key, boto.s3.key.Key):
raise InvalidUriError('Requested key (%s) is not a subclass of '
'boto.s3.key.Key' % str(type(key)))
prov_name = key.bucket.connection.provider.get_provider_name()
uri_str = '%s://%s/%s' % (prov_name, key.bucket.name, key.name)
return storage_uri(uri_str)
boto.plugin.load_plugins(config)
|
atris/gpdb | refs/heads/master | gpMgmt/bin/ext/figleaf/annotate_sections.py | 35 | #! /usr/bin/env python
import figleaf
from figleaf import internals
from sets import Set as set
import sys
from cPickle import load
import os
from optparse import OptionParser
def main():
#### OPTIONS
parser = OptionParser()
parser.add_option('-c', '--coverage', nargs=1, action="store",
dest="coverage_file",
help = 'load coverage info from this file',
default='.figleaf_sections')
####
(options, args) = parser.parse_args(sys.argv[1:])
coverage_file = options.coverage_file
figleaf.load_pickled_coverage(open(coverage_file))
data = internals.CoverageData(figleaf._t)
full_cov = data.gather_files()
for filename in args:
annotate_file_with_sections(filename, data, full_cov)
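# Sketch of the '.sections' output produced below (illustrative; the real
# columns depend on which tags appear in the coverage file). With two tags,
# the header lists one tag per line and each source line then gets a '+' in
# the column of every tag that covered it:
#
#   -- all coverage --
#   | my-section
#   | |
#
#   + +  | def fully_covered():
#   +    |     covered_only_by_full_run()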
def annotate_file_with_sections(short, data, full_cov):
full = os.path.abspath(short)
tags = {}
sections = data.gather_sections(full)
sections.update(data.gather_sections(short))
print data.sections
print '*** PROCESSING:', short, '\n\t==>', short + '.sections'
for tag, cov in sections.items():
if cov:
tags[tag] = cov
if not tags:
print '*** No coverage info for file', short
tag_names = tags.keys()
tag_names.sort()
tag_names.reverse()
tags["-- all coverage --"] = full_cov.get(full, set())
tag_names.insert(0, "-- all coverage --")
n_tags = len(tag_names)
fp = open(short + '.sections', 'w')
for i, tag in enumerate(tag_names):
fp.write('%s%s\n' % ('| ' * i, tag))
fp.write('| ' * n_tags)
fp.write('\n\n')
source = open(full)
for n, line in enumerate(source):
marks = ""
for tag in tag_names:
cov = tags[tag]
symbol = ' '
if (n+1) in cov:
symbol = '+ '
marks += symbol
fp.write('%s | %s' % (marks, line))
fp.close()
|
zhouyao1994/incubator-superset | refs/heads/master | tests/model_tests.py | 1 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import textwrap
import unittest
import pandas
from sqlalchemy.engine.url import make_url
from superset import app
from superset.models.core import Database
from superset.utils.core import get_example_database, QueryStatus
from .base_tests import SupersetTestCase
class DatabaseModelTestCase(SupersetTestCase):
@unittest.skipUnless(
SupersetTestCase.is_module_installed("requests"), "requests not installed"
)
def test_database_schema_presto(self):
sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive/default"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEqual("hive/default", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEqual("hive/core_db", db)
sqlalchemy_uri = "presto://presto.airbnb.io:8080/hive"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEqual("hive", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEqual("hive/core_db", db)
def test_database_schema_postgres(self):
sqlalchemy_uri = "postgresql+psycopg2://postgres.airbnb.io:5439/prod"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEqual("prod", db)
db = make_url(model.get_sqla_engine(schema="foo").url).database
self.assertEqual("prod", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("thrift"), "thrift not installed"
)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("pyhive"), "pyhive not installed"
)
def test_database_schema_hive(self):
sqlalchemy_uri = "hive://[email protected]:10000/default?auth=NOSASL"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEqual("default", db)
db = make_url(model.get_sqla_engine(schema="core_db").url).database
self.assertEqual("core_db", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed"
)
def test_database_schema_mysql(self):
sqlalchemy_uri = "mysql://root@localhost/superset"
model = Database(database_name="test_database", sqlalchemy_uri=sqlalchemy_uri)
db = make_url(model.get_sqla_engine().url).database
self.assertEqual("superset", db)
db = make_url(model.get_sqla_engine(schema="staging").url).database
self.assertEqual("staging", db)
@unittest.skipUnless(
SupersetTestCase.is_module_installed("MySQLdb"), "mysqlclient not installed"
)
def test_database_impersonate_user(self):
uri = "mysql://root@localhost"
example_user = "giuseppe"
model = Database(database_name="test_database", sqlalchemy_uri=uri)
model.impersonate_user = True
user_name = make_url(model.get_sqla_engine(user_name=example_user).url).username
self.assertEqual(example_user, user_name)
model.impersonate_user = False
user_name = make_url(model.get_sqla_engine(user_name=example_user).url).username
self.assertNotEqual(example_user, user_name)
def test_select_star(self):
db = get_example_database()
table_name = "energy_usage"
sql = db.select_star(table_name, show_cols=False, latest_partition=False)
expected = textwrap.dedent(
f"""\
SELECT *
FROM {table_name}
LIMIT 100"""
)
assert sql.startswith(expected)
sql = db.select_star(table_name, show_cols=True, latest_partition=False)
expected = textwrap.dedent(
f"""\
SELECT source,
target,
value
FROM energy_usage
LIMIT 100"""
)
assert sql.startswith(expected)
def test_select_star_fully_qualified_names(self):
db = get_example_database()
schema = "schema.name"
table_name = "table/name"
sql = db.select_star(
table_name, schema=schema, show_cols=False, latest_partition=False
)
fully_qualified_names = {
"sqlite": '"schema.name"."table/name"',
"mysql": "`schema.name`.`table/name`",
"postgres": '"schema.name"."table/name"',
}
fully_qualified_name = fully_qualified_names.get(db.db_engine_spec.engine)
if fully_qualified_name:
expected = textwrap.dedent(
f"""\
SELECT *
FROM {fully_qualified_name}
LIMIT 100"""
)
assert sql.startswith(expected)
def test_single_statement(self):
main_db = get_example_database()
if main_db.backend == "mysql":
df = main_db.get_df("SELECT 1", None)
self.assertEqual(df.iat[0, 0], 1)
df = main_db.get_df("SELECT 1;", None)
self.assertEqual(df.iat[0, 0], 1)
def test_multi_statement(self):
main_db = get_example_database()
if main_db.backend == "mysql":
df = main_db.get_df("USE superset; SELECT 1", None)
self.assertEqual(df.iat[0, 0], 1)
df = main_db.get_df("USE superset; SELECT ';';", None)
self.assertEqual(df.iat[0, 0], ";")
class SqlaTableModelTestCase(SupersetTestCase):
def test_get_timestamp_expression(self):
tbl = self.get_table_by_name("birth_names")
ds_col = tbl.get_column("ds")
sqla_literal = ds_col.get_timestamp_expression(None)
self.assertEqual(str(sqla_literal.compile()), "ds")
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEqual(compiled, "DATE(ds)")
prev_ds_expr = ds_col.expression
ds_col.expression = "DATE_ADD(ds, 1)"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEqual(compiled, "DATE(DATE_ADD(ds, 1))")
ds_col.expression = prev_ds_expr
def test_get_timestamp_expression_epoch(self):
tbl = self.get_table_by_name("birth_names")
ds_col = tbl.get_column("ds")
ds_col.expression = None
ds_col.python_date_format = "epoch_s"
sqla_literal = ds_col.get_timestamp_expression(None)
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEqual(compiled, "from_unixtime(ds)")
ds_col.python_date_format = "epoch_s"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEqual(compiled, "DATE(from_unixtime(ds))")
prev_ds_expr = ds_col.expression
ds_col.expression = "DATE_ADD(ds, 1)"
sqla_literal = ds_col.get_timestamp_expression("P1D")
compiled = "{}".format(sqla_literal.compile())
if tbl.database.backend == "mysql":
self.assertEqual(compiled, "DATE(from_unixtime(DATE_ADD(ds, 1)))")
ds_col.expression = prev_ds_expr
def query_with_expr_helper(self, is_timeseries, inner_join=True):
tbl = self.get_table_by_name("birth_names")
ds_col = tbl.get_column("ds")
ds_col.expression = None
ds_col.python_date_format = None
spec = self.get_database_by_id(tbl.database_id).db_engine_spec
if not spec.allows_joins and inner_join:
            # if the db does not support inner joins, we cannot force one, so skip
return None
old_inner_join = spec.allows_joins
spec.allows_joins = inner_join
arbitrary_gby = "state || gender || '_test'"
arbitrary_metric = dict(
label="arbitrary", expressionType="SQL", sqlExpression="COUNT(1)"
)
query_obj = dict(
groupby=[arbitrary_gby, "name"],
metrics=[arbitrary_metric],
filter=[],
is_timeseries=is_timeseries,
columns=[],
granularity="ds",
from_dttm=None,
to_dttm=None,
extras=dict(time_grain_sqla="P1Y"),
)
qr = tbl.query(query_obj)
self.assertEqual(qr.status, QueryStatus.SUCCESS)
sql = qr.query
self.assertIn(arbitrary_gby, sql)
self.assertIn("name", sql)
if inner_join and is_timeseries:
self.assertIn("JOIN", sql.upper())
else:
self.assertNotIn("JOIN", sql.upper())
spec.allows_joins = old_inner_join
self.assertIsNotNone(qr.df)
return qr.df
def test_query_with_expr_groupby_timeseries(self):
        def canonicalize_df(df):
ret = df.sort_values(by=list(df.columns.values), inplace=False)
ret.reset_index(inplace=True, drop=True)
return ret
df1 = self.query_with_expr_helper(is_timeseries=True, inner_join=True)
df2 = self.query_with_expr_helper(is_timeseries=True, inner_join=False)
        self.assertIsNotNone(df2)  # df1 can be None if the db does not support joins
if df1 is not None:
            pandas.testing.assert_frame_equal(
                canonicalize_df(df1), canonicalize_df(df2)
            )
def test_query_with_expr_groupby(self):
self.query_with_expr_helper(is_timeseries=False)
def test_sql_mutator(self):
tbl = self.get_table_by_name("birth_names")
query_obj = dict(
groupby=[],
metrics=[],
filter=[],
is_timeseries=False,
columns=["name"],
granularity=None,
from_dttm=None,
to_dttm=None,
extras={},
)
sql = tbl.get_query_str(query_obj)
self.assertNotIn("-- COMMENT", sql)
def mutator(*args):
return "-- COMMENT\n" + args[0]
app.config["SQL_QUERY_MUTATOR"] = mutator
sql = tbl.get_query_str(query_obj)
self.assertIn("-- COMMENT", sql)
app.config["SQL_QUERY_MUTATOR"] = None
def test_query_with_non_existent_metrics(self):
tbl = self.get_table_by_name("birth_names")
query_obj = dict(
groupby=[],
metrics=["invalid"],
filter=[],
is_timeseries=False,
columns=["name"],
granularity=None,
from_dttm=None,
to_dttm=None,
extras={},
)
with self.assertRaises(Exception) as context:
tbl.get_query_str(query_obj)
self.assertTrue("Metric 'invalid' does not exist", context.exception)
|
dawnpower/nova | refs/heads/master | nova/tests/unit/objects/test_security_group_rule.py | 24 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import db
from nova import exception
from nova import objects
from nova.tests.unit.objects import test_objects
from nova.tests.unit.objects import test_security_group
fake_rule = {
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'id': 1,
'protocol': 'tcp',
'from_port': 22,
'to_port': 22,
'cidr': '0.0.0.0/0',
}
class _TestSecurityGroupRuleObject(object):
def test_get_by_id(self):
with mock.patch.object(db, 'security_group_rule_get') as sgrg:
sgrg.return_value = fake_rule
rule = objects.SecurityGroupRule.get_by_id(
self.context, 1)
for field in fake_rule:
if field == 'cidr':
self.assertEqual(fake_rule[field], str(rule[field]))
else:
self.assertEqual(fake_rule[field], rule[field])
sgrg.assert_called_with(self.context, 1)
def test_get_by_security_group(self):
secgroup = objects.SecurityGroup()
secgroup.id = 123
rule = dict(fake_rule)
rule['grantee_group'] = dict(test_security_group.fake_secgroup, id=123)
stupid_method = 'security_group_rule_get_by_security_group'
with mock.patch.object(db, stupid_method) as sgrgbsg:
sgrgbsg.return_value = [rule]
rules = (objects.SecurityGroupRuleList.
get_by_security_group(self.context, secgroup))
self.assertEqual(1, len(rules))
self.assertEqual(123, rules[0].grantee_group.id)
@mock.patch.object(db, 'security_group_rule_create',
return_value=fake_rule)
def test_create(self, db_mock):
rule = objects.SecurityGroupRule(context=self.context)
rule.protocol = 'tcp'
secgroup = objects.SecurityGroup()
secgroup.id = 123
parentgroup = objects.SecurityGroup()
parentgroup.id = 223
rule.grantee_group = secgroup
rule.parent_group = parentgroup
rule.create()
updates = db_mock.call_args[0][1]
self.assertEqual(fake_rule['id'], rule.id)
self.assertEqual(updates['group_id'], rule.grantee_group.id)
self.assertEqual(updates['parent_group_id'], rule.parent_group.id)
@mock.patch.object(db, 'security_group_rule_create',
return_value=fake_rule)
def test_set_id_failure(self, db_mock):
rule = objects.SecurityGroupRule(context=self.context)
rule.create()
self.assertRaises(exception.ReadOnlyFieldError, setattr,
rule, 'id', 124)
class TestSecurityGroupRuleObject(test_objects._LocalTest,
_TestSecurityGroupRuleObject):
pass
class TestSecurityGroupRuleObjectRemote(test_objects._RemoteTest,
_TestSecurityGroupRuleObject):
pass
|
titom1986/CouchPotatoServer | refs/heads/develop | couchpotato/core/media/movie/providers/userscript/youteather.py | 81 | import re
from couchpotato.core.media._base.providers.userscript.base import UserscriptBase
autoload = 'YouTheater'
class YouTheater(UserscriptBase):
id_re = re.compile("view\.php\?id=(\d+)")
includes = ['http://www.youtheater.com/view.php?id=*', 'http://youtheater.com/view.php?id=*',
'http://www.sratim.co.il/view.php?id=*', 'http://sratim.co.il/view.php?id=*']
def getMovie(self, url):
id = self.id_re.findall(url)[0]
url = 'http://www.youtheater.com/view.php?id=%s' % id
return super(YouTheater, self).getMovie(url)
|
ivandevp/django | refs/heads/master | tests/m2m_through/tests.py | 295 | from __future__ import unicode_literals
from datetime import datetime
from operator import attrgetter
from django.test import TestCase
from .models import (
CustomMembership, Employee, Event, Friendship, Group, Ingredient,
Invitation, Membership, Person, PersonSelfRefM2M, Recipe, RecipeIngredient,
Relationship,
)
class M2mThroughTests(TestCase):
def setUp(self):
self.bob = Person.objects.create(name='Bob')
self.jim = Person.objects.create(name='Jim')
self.jane = Person.objects.create(name='Jane')
self.rock = Group.objects.create(name='Rock')
self.roll = Group.objects.create(name='Roll')
def test_retrieve_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
expected = ['Jane', 'Jim']
self.assertQuerysetEqual(
self.rock.members.all(),
expected,
attrgetter("name")
)
def test_get_on_intermediate_model(self):
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.get(person=self.jane, group=self.rock)
self.assertEqual(
repr(queryset),
'<Membership: Jane is a member of Rock>'
)
def test_filter_on_intermediate_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.filter(group=self.rock)
expected = [
'<Membership: Jim is a member of Rock>',
'<Membership: Jane is a member of Rock>',
]
self.assertQuerysetEqual(
queryset,
expected
)
def test_cannot_use_add_on_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.add(self.bob)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_create_on_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.create(name='Annie')
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_cannot_use_remove_on_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.rock.members.remove(self.jim)
self.assertQuerysetEqual(
self.rock.members.all(),
['Jim', ],
attrgetter("name")
)
def test_cannot_use_setattr_on_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Person.objects.filter(name__in=['Bob', 'Jim']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.rock, 'members', members)
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_clear_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
self.rock.members.clear()
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_retrieve_reverse_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
expected = ['Rock', 'Roll']
self.assertQuerysetEqual(
self.jim.group_set.all(),
expected,
attrgetter("name")
)
def test_cannot_use_add_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use add() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.add(self.bob)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot use create() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.create(name='Funk')
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_cannot_use_remove_on_reverse_m2m_with_intermediary_model(self):
Membership.objects.create(person=self.bob, group=self.rock)
msg = 'Cannot use remove() on a ManyToManyField which specifies an intermediary model'
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.remove(self.rock)
self.assertQuerysetEqual(
self.bob.group_set.all(),
['Rock', ],
attrgetter('name')
)
def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
msg = 'Cannot set values on a ManyToManyField which specifies an intermediary model'
members = list(Group.objects.filter(name__in=['Rock', 'Roll']))
with self.assertRaisesMessage(AttributeError, msg):
setattr(self.bob, 'group_set', members)
self.assertQuerysetEqual(
self.bob.group_set.all(),
[]
)
def test_clear_on_reverse_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
self.jim.group_set.clear()
self.assertQuerysetEqual(
self.jim.group_set.all(),
[]
)
def test_query_model_by_attribute_name_of_related_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Group.objects.filter(members__name='Bob'),
['Roll', ],
attrgetter("name")
)
def test_query_first_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Group.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Roll'],
attrgetter("name")
)
def test_query_second_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Person.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Jane'],
attrgetter("name")
)
def test_query_model_by_related_model_name(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Person.objects.filter(group__name="Rock"),
['Jane', 'Jim'],
attrgetter("name")
)
def test_query_model_by_custom_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
Person.objects.filter(custom__name="Rock"),
['Bob', 'Jim'],
attrgetter("name")
)
def test_query_model_by_intermediate_can_return_non_unique_queryset(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(
person=self.jane, group=self.rock,
date_joined=datetime(2006, 1, 1)
)
Membership.objects.create(
person=self.bob, group=self.roll,
date_joined=datetime(2004, 1, 1))
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(
person=self.jane, group=self.roll,
date_joined=datetime(2004, 1, 1))
qs = Person.objects.filter(
membership__date_joined__gt=datetime(2004, 1, 1)
)
self.assertQuerysetEqual(
qs,
['Jane', 'Jim', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_forward_empty_qs(self):
self.assertQuerysetEqual(
self.rock.custom_members.all(),
[]
)
def test_custom_related_name_reverse_empty_qs(self):
self.assertQuerysetEqual(
self.bob.custom.all(),
[]
)
def test_custom_related_name_forward_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.rock.custom_members.all(),
['Bob', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_reverse_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom.all(),
['Rock'],
attrgetter("name")
)
def test_custom_related_name_doesnt_conflict_with_fky_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom_person_related_name.all(),
['<CustomMembership: Bob is a member of Rock>']
)
def test_through_fields(self):
"""
Tests that relations with intermediary tables with multiple FKs
to the M2M's ``to`` model are possible.
"""
event = Event.objects.create(title='Rockwhale 2014')
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jim)
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jane)
self.assertQuerysetEqual(
event.invitees.all(),
['Jane', 'Jim'],
attrgetter('name')
)
class M2mThroughReferentialTests(TestCase):
def test_self_referential_empty_qs(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
self.assertQuerysetEqual(
tony.friends.all(),
[]
)
    def test_self_referential_non_symmetrical_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
    def test_self_referential_non_symmetrical_second_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
    def test_self_referential_non_symmetrical_clear_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
chris.friends.clear()
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
# Since this isn't a symmetrical relation, Tony's friend link still exists.
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
    def test_self_referential_symmetrical(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
Friendship.objects.create(
first=chris, second=tony, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
self.assertQuerysetEqual(
chris.friends.all(),
['Tony'],
attrgetter("name")
)
def test_through_fields_self_referential(self):
john = Employee.objects.create(name='john')
peter = Employee.objects.create(name='peter')
mary = Employee.objects.create(name='mary')
harry = Employee.objects.create(name='harry')
Relationship.objects.create(source=john, target=peter, another=None)
Relationship.objects.create(source=john, target=mary, another=None)
Relationship.objects.create(source=john, target=harry, another=peter)
self.assertQuerysetEqual(
john.subordinates.all(),
['peter', 'mary', 'harry'],
attrgetter('name')
)
class M2mThroughToFieldsTests(TestCase):
def setUp(self):
self.pea = Ingredient.objects.create(iname='pea')
self.potato = Ingredient.objects.create(iname='potato')
self.tomato = Ingredient.objects.create(iname='tomato')
self.curry = Recipe.objects.create(rname='curry')
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.potato)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.pea)
RecipeIngredient.objects.create(recipe=self.curry, ingredient=self.tomato)
def test_retrieval(self):
# Forward retrieval
self.assertQuerysetEqual(
self.curry.ingredients.all(),
[self.pea, self.potato, self.tomato], lambda x: x
)
# Backward retrieval
self.assertEqual(self.tomato.recipes.get(), self.curry)
def test_choices(self):
field = Recipe._meta.get_field('ingredients')
self.assertEqual(
[choice[0] for choice in field.get_choices(include_blank=False)],
['pea', 'potato', 'tomato']
)
|
dictoon/appleseed-maya | refs/heads/master | scripts/appleseedMaya/hypershadeCallbacks.py | 5 |
#
# This source file is part of appleseed.
# Visit https://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2016-2019 Esteban Tovagliari, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Maya imports.
import maya.cmds as mc
import maya.mel as mel
# appleseedMaya imports.
from logger import logger
def hyperShadePanelBuildCreateMenuCallback():
mc.menuItem(label="appleseed")
mc.menuItem(divider=True)
def hyperShadePanelBuildCreateSubMenuCallback():
return "rendernode/appleseed/surface"
def hyperShadePanelPluginChangeCallback(classification, changeType):
if 'rendernode/appleseed' in classification:
return 1
return 0
def createRenderNodeSelectNodeCategoriesCallback(flag, treeLister):
if flag == "allWithAppleseedUp":
mc.treeLister(treeLister, edit=True, selectPath="appleseed")
def createRenderNodePluginChangeCallback(classification):
if 'rendernode/appleseed' in classification:
return 1
return 0
def renderNodeClassificationCallback():
return "rendernode/appleseed"
def createAsRenderNode(nodeType=None, postCommand=None):
classification = mc.getClassification(nodeType)
logger.debug(
"CreateAsRenderNode called: nodeType = {0}, class = {1}, pcmd = {2}".format(
nodeType,
classification,
postCommand
)
)
for cl in classification:
if "rendernode/appleseed/surface" in cl.lower():
mat = mc.shadingNode(nodeType, asShader=True)
shadingGroup = mc.sets(
renderable=True,
noSurfaceShader=True,
empty=True,
name=mat + "SG"
)
mc.connectAttr(mat + ".outColor", shadingGroup + ".surfaceShader")
logger.debug("Created shading node {0} asShader".format(mat))
elif "rendernode/appleseed/texture/2d" in cl.lower():
mat = mc.shadingNode(nodeType, asTexture=True)
placeTex = mc.shadingNode("place2dTexture", asUtility=True)
mc.connectAttr(placeTex + ".outUV", mat + ".uv")
mc.connectAttr(placeTex + ".outUvFilterSize", mat + ".uvFilterSize")
logger.debug("Created shading node {0} asTexture2D".format(mat))
elif "rendernode/appleseed/texture/3d" in cl.lower():
mat = mc.shadingNode(nodeType, asTexture=True)
placeTex = mc.shadingNode("place3dTexture", asUtility=True)
mc.connectAttr(placeTex + ".wim[0]", mat + ".placementMatrix")
logger.debug("Created shading node {0} asTexture3D".format(mat))
else:
mat = mc.shadingNode(nodeType, asUtility=True)
logger.debug("Created shading node {0} asUtility".format(mat))
if postCommand is not None:
postCommand = postCommand.replace("%node", mat)
postCommand = postCommand.replace("%type", '\"\"')
mel.eval(postCommand)
return ""
def createRenderNodeCallback(postCommand, nodeType):
#logger.debug("createRenderNodeCallback called!")
for c in mc.getClassification(nodeType):
if 'rendernode/appleseed' in c.lower():
buildNodeCmd = (
"import appleseedMaya.hypershadeCallbacks;"
"appleseedMaya.hypershadeCallbacks.createAsRenderNode"
"(nodeType=\\\"{0}\\\", postCommand='{1}')").format(nodeType, postCommand)
return "string $cmd = \"{0}\"; python($cmd);".format(buildNodeCmd)
def buildRenderNodeTreeListerContentCallback(tl, postCommand, filterString):
melCmd = 'addToRenderNodeTreeLister("{0}", "{1}", "{2}", "{3}", "{4}", "{5}");'.format(
tl,
postCommand,
"appleseed/Surface",
"rendernode/appleseed/surface",
"-asShader",
""
)
logger.debug("buildRenderNodeTreeListerContentCallback: mel = %s" % melCmd)
mel.eval(melCmd)
melCmd = 'addToRenderNodeTreeLister("{0}", "{1}", "{2}", "{3}", "{4}", "{5}");'.format(
tl,
postCommand,
"appleseed/2D Textures",
"rendernode/appleseed/texture/2d",
"-asTexture",
""
)
logger.debug("buildRenderNodeTreeListerContentCallback: mel = %s" % melCmd)
mel.eval(melCmd)
melCmd = 'addToRenderNodeTreeLister("{0}", "{1}", "{2}", "{3}", "{4}", "{5}");'.format(
tl,
postCommand,
"appleseed/3D Textures",
"rendernode/appleseed/texture/3d",
"-asTexture",
""
)
logger.debug("buildRenderNodeTreeListerContentCallback: mel = %s" % melCmd)
mel.eval(melCmd)
melCmd = 'addToRenderNodeTreeLister("{0}", "{1}", "{2}", "{3}", "{4}", "{5}");'.format(
tl,
postCommand,
"appleseed/Utilities",
"rendernode/appleseed/utility",
"-asUtility",
""
)
logger.debug("buildRenderNodeTreeListerContentCallback: mel = %s" % melCmd)
mel.eval(melCmd)
def nodeCanBeUsedAsMaterialCallback(nodeId, nodeOwner):
logger.debug((
"nodeCanBeUsedAsMaterialCallback called: "
"nodeId = {0}, nodeOwner = {1}").format(nodeId, nodeOwner)
)
if nodeOwner == 'appleseedMaya':
return 1
return 0
|
tizianasellitto/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/wptserve/wptserve/request.py | 87 | import base64
import cgi
import Cookie
import StringIO
import tempfile
import urlparse
from . import stash
from .utils import HTTPException
missing = object()
class Server(object):
"""Data about the server environment
.. attribute:: config
Environment configuration information with information about the
various servers running, their hostnames and ports.
.. attribute:: stash
Stash object holding state stored on the server between requests.
"""
config = None
def __init__(self, request):
self._stash = None
self._request = request
@property
def stash(self):
if self._stash is None:
address, authkey = stash.load_env_config()
self._stash = stash.Stash(self._request.url_parts.path, address, authkey)
return self._stash
class InputFile(object):
max_buffer_size = 1024*1024
def __init__(self, rfile, length):
"""File-like object used to provide a seekable view of request body data"""
self._file = rfile
self.length = length
self._file_position = 0
if length > self.max_buffer_size:
self._buf = tempfile.TemporaryFile(mode="rw+b")
else:
self._buf = StringIO.StringIO()
@property
def _buf_position(self):
rv = self._buf.tell()
assert rv <= self._file_position
return rv
def read(self, bytes=-1):
assert self._buf_position <= self._file_position
if bytes < 0:
bytes = self.length - self._buf_position
bytes_remaining = min(bytes, self.length - self._buf_position)
if bytes_remaining == 0:
return ""
if self._buf_position != self._file_position:
buf_bytes = min(bytes_remaining, self._file_position - self._buf_position)
old_data = self._buf.read(buf_bytes)
bytes_remaining -= buf_bytes
else:
old_data = ""
assert self._buf_position == self._file_position, (
"Before reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
new_data = self._file.read(bytes_remaining)
self._buf.write(new_data)
self._file_position += bytes_remaining
assert self._buf_position == self._file_position, (
"After reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
return old_data + new_data
def tell(self):
return self._buf_position
def seek(self, offset):
if offset > self.length or offset < 0:
raise ValueError
if offset <= self._file_position:
self._buf.seek(offset)
else:
self.read(offset - self._file_position)
def readline(self, max_bytes=None):
if max_bytes is None:
max_bytes = self.length - self._buf_position
if self._buf_position < self._file_position:
data = self._buf.readline(max_bytes)
if data.endswith("\n") or len(data) == max_bytes:
return data
else:
data = ""
assert self._buf_position == self._file_position
initial_position = self._file_position
found = False
buf = []
max_bytes -= len(data)
while not found:
readahead = self.read(min(2, max_bytes))
max_bytes -= len(readahead)
for i, c in enumerate(readahead):
if c == "\n":
buf.append(readahead[:i+1])
found = True
break
if not found:
buf.append(readahead)
if not readahead or not max_bytes:
break
new_data = "".join(buf)
data += new_data
self.seek(initial_position + len(new_data))
return data
def readlines(self):
rv = []
while True:
data = self.readline()
if data:
rv.append(data)
else:
break
return rv
def next(self):
data = self.readline()
if data:
return data
else:
raise StopIteration
def __iter__(self):
return self
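# Illustrative behaviour of InputFile (a sketch, not part of the original
# module): data already read is buffered, so the body can be rewound and
# re-read without consuming the underlying socket twice.
#
#   f = InputFile(StringIO.StringIO("abcdef"), 6)
#   f.read(3)   # -> "abc" (also written to the buffer)
#   f.seek(0)
#   f.read()    # -> "abcdef"; only "def" is read from the underlying file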
class Request(object):
"""Object representing a HTTP request.
.. attribute:: doc_root
The local directory to use as a base when resolving paths
.. attribute:: route_match
Regexp match object from matching the request path to the route
selected for the request.
.. attribute:: protocol_version
HTTP version specified in the request.
.. attribute:: method
HTTP method in the request.
.. attribute:: request_path
Request path as it appears in the HTTP request.
.. attribute:: url_base
The prefix part of the path; typically / unless the handler has a url_base set
.. attribute:: url
Absolute URL for the request.
.. attribute:: headers
List of request headers.
.. attribute:: raw_input
File-like object representing the body of the request.
.. attribute:: url_parts
Parts of the requested URL as obtained by urlparse.urlsplit(path)
.. attribute:: request_line
Raw request line
.. attribute:: headers
RequestHeaders object providing a dictionary-like representation of
the request headers.
.. attribute:: body
Request body as a string
.. attribute:: GET
MultiDict representing the parameters supplied with the request.
Note that these may be present on non-GET requests; the name is
chosen to be familiar to users of other systems such as PHP.
.. attribute:: POST
MultiDict representing the request body parameters. Most parameters
are present as string values, but file uploads have file-like
values.
.. attribute:: cookies
Cookies object representing cookies sent with the request with a
dictionary-like interface.
.. attribute:: auth
Object with username and password properties representing any
credentials supplied using HTTP authentication.
.. attribute:: server
Server object containing information about the server environment.
"""
def __init__(self, request_handler):
self.doc_root = request_handler.server.router.doc_root
self.route_match = None # Set by the router
self.protocol_version = request_handler.protocol_version
self.method = request_handler.command
scheme = request_handler.server.scheme
host = request_handler.headers.get("Host")
port = request_handler.server.server_address[1]
if host is None:
host = request_handler.server.server_address[0]
else:
if ":" in host:
host, port = host.split(":", 1)
self.request_path = request_handler.path
self.url_base = "/"
if self.request_path.startswith(scheme + "://"):
self.url = request_handler.path
else:
self.url = "%s://%s:%s%s" % (scheme,
host,
port,
self.request_path)
self.url_parts = urlparse.urlsplit(self.url)
self._raw_headers = request_handler.headers
self.request_line = request_handler.raw_requestline
self._headers = None
self.raw_input = InputFile(request_handler.rfile,
int(self.headers.get("Content-Length", 0)))
self._body = None
self._GET = None
self._POST = None
self._cookies = None
self._auth = None
self.server = Server(self)
def __repr__(self):
return "<Request %s %s>" % (self.method, self.url)
@property
def GET(self):
if self._GET is None:
params = urlparse.parse_qsl(self.url_parts.query, keep_blank_values=True)
self._GET = MultiDict()
for key, value in params:
self._GET.add(key, value)
return self._GET
@property
def POST(self):
if self._POST is None:
#Work out the post parameters
pos = self.raw_input.tell()
self.raw_input.seek(0)
fs = cgi.FieldStorage(fp=self.raw_input,
environ={"REQUEST_METHOD": self.method},
headers=self.headers,
keep_blank_values=True)
self._POST = MultiDict.from_field_storage(fs)
self.raw_input.seek(pos)
return self._POST
@property
def cookies(self):
if self._cookies is None:
parser = Cookie.BaseCookie()
cookie_headers = self.headers.get("cookie", "")
parser.load(cookie_headers)
cookies = Cookies()
for key, value in parser.iteritems():
cookies[key] = CookieValue(value)
self._cookies = cookies
return self._cookies
@property
def headers(self):
if self._headers is None:
self._headers = RequestHeaders(self._raw_headers)
return self._headers
@property
def body(self):
if self._body is None:
pos = self.raw_input.tell()
self.raw_input.seek(0)
self._body = self.raw_input.read()
self.raw_input.seek(pos)
return self._body
@property
def auth(self):
if self._auth is None:
self._auth = Authentication(self.headers)
return self._auth
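# Illustrative sketch (not part of the original module): the properties above
# are all lazily computed, so headers, GET parameters and the body are only
# parsed on first access. `request_handler` is whatever handler stub the
# server supplies; this helper is an assumption for demonstration only.
def _example_request_properties(request_handler):
    req = Request(request_handler)
    query = req.GET.first("q", default=None)  # parsed from the URL on demand
    body = req.body                           # reads raw_input, restores position
    return query, body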
class RequestHeaders(dict):
"""Dictionary-like API for accessing request headers."""
def __init__(self, items):
        for key, value in items.items():
key = key.lower()
if key in self:
self[key].append(value)
else:
dict.__setitem__(self, key, [value])
def __getitem__(self, key):
"""Get all headers of a certain (case-insensitive) name. If there is
more than one, the values are returned comma separated"""
values = dict.__getitem__(self, key.lower())
if len(values) == 1:
return values[0]
else:
return ", ".join(values)
def __setitem__(self, name, value):
        raise Exception("RequestHeaders is read-only")
def get(self, key, default=None):
"""Get a string representing all headers with a particular value,
with multiple headers separated by a comma. If no header is found
return a default value
:param key: The header name to look up (case-insensitive)
:param default: The value to return in the case of no match
"""
try:
return self[key]
except KeyError:
return default
def get_list(self, key, default=missing):
"""Get all the header values for a particular field name as
a list"""
try:
return dict.__getitem__(self, key.lower())
except KeyError:
if default is not missing:
return default
else:
raise
def __contains__(self, key):
return dict.__contains__(self, key.lower())
def iteritems(self):
for item in self:
yield item, self[item]
def itervalues(self):
for item in self:
yield self[item]
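# Illustrative sketch (not part of the original module): RequestHeaders folds
# header names to lower case and joins repeated headers with ", ".
# `raw_headers` is any mapping exposing items(); this helper is an assumption
# for demonstration only.
def _example_request_headers(raw_headers):
    headers = RequestHeaders(raw_headers)
    accept = headers.get("Accept", "*/*")     # case-insensitive lookup
    cookies = headers.get_list("cookie", [])  # every value, as a list
    return accept, cookies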
class CookieValue(object):
"""Representation of cookies.
Note that cookies are considered read-only and the string value
of the cookie will not change if you update the field values.
    However, this is not enforced.
.. attribute:: key
The name of the cookie.
.. attribute:: value
The value of the cookie
.. attribute:: expires
The expiry date of the cookie
.. attribute:: path
The path of the cookie
.. attribute:: comment
The comment of the cookie.
.. attribute:: domain
The domain with which the cookie is associated
.. attribute:: max_age
The max-age value of the cookie.
.. attribute:: secure
Whether the cookie is marked as secure
.. attribute:: httponly
Whether the cookie is marked as httponly
"""
def __init__(self, morsel):
self.key = morsel.key
self.value = morsel.value
for attr in ["expires", "path",
"comment", "domain", "max-age",
"secure", "version", "httponly"]:
setattr(self, attr.replace("-", "_"), morsel[attr])
self._str = morsel.OutputString()
def __str__(self):
return self._str
def __repr__(self):
return self._str
def __eq__(self, other):
"""Equality comparison for cookies. Compares to other cookies
based on value alone and on non-cookies based on the equality
of self.value with the other object so that a cookie with value
"ham" compares equal to the string "ham"
"""
if hasattr(other, "value"):
return self.value == other.value
return self.value == other
class MultiDict(dict):
"""Dictionary type that holds multiple values for each
key"""
#TODO: this should perhaps also order the keys
def __init__(self):
pass
def __setitem__(self, name, value):
dict.__setitem__(self, name, [value])
def add(self, name, value):
if name in self:
dict.__getitem__(self, name).append(value)
else:
dict.__setitem__(self, name, [value])
def __getitem__(self, key):
"""Get the first value with a given key"""
#TODO: should this instead be the last value?
return self.first(key)
def first(self, key, default=missing):
"""Get the first value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[0]
elif default is not missing:
return default
raise KeyError
def last(self, key, default=missing):
"""Get the last value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[-1]
elif default is not missing:
return default
raise KeyError
def get_list(self, key):
"""Get all values with a given key as a list
:param key: The key to lookup
"""
return dict.__getitem__(self, key)
@classmethod
def from_field_storage(cls, fs):
self = cls()
if fs.list is None:
return self
for key in fs:
values = fs[key]
if not isinstance(values, list):
values = [values]
for value in values:
                if not value.filename:
                    value = value.value
self.add(key, value)
return self
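# Illustrative sketch (not part of the original module): MultiDict keeps every
# value added for a key; plain indexing returns the first one.
def _example_multidict():
    md = MultiDict()
    md.add("a", "1")
    md.add("a", "2")
    return md["a"], md.last("a"), md.get_list("a")  # ("1", "2", ["1", "2"])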
class Cookies(MultiDict):
"""MultiDict specialised for Cookie values"""
def __init__(self):
pass
def __getitem__(self, key):
return self.last(key)
class Authentication(object):
"""Object for dealing with HTTP Authentication
.. attribute:: username
The username supplied in the HTTP Authorization
header, or None
.. attribute:: password
The password supplied in the HTTP Authorization
header, or None
"""
def __init__(self, headers):
self.username = None
self.password = None
auth_schemes = {"Basic": self.decode_basic}
if "authorization" in headers:
header = headers.get("authorization")
auth_type, data = header.split(" ", 1)
if auth_type in auth_schemes:
self.username, self.password = auth_schemes[auth_type](data)
else:
raise HTTPException(400, "Unsupported authentication scheme %s" % auth_type)
def decode_basic(self, data):
decoded_data = base64.decodestring(data)
return decoded_data.split(":", 1)
|
anirudhSK/chromium | refs/heads/master | third_party/simplejson/__init__.py | 175 | r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.6.2'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first',
]
__author__ = 'Bob Ippolito <[email protected]>'
from decimal import Decimal
from decoder import JSONDecoder, JSONDecodeError
from encoder import JSONEncoder, JSONEncoderForHTML
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from simplejson._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
bigint_as_string=False,
item_sort_key=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
**kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
    If ``ensure_ascii`` is false, then some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
    rounding that happens in JavaScript otherwise. Note that this is still a
lossy operation that will not round-trip correctly and should be used
sparingly.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not item_sort_key and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
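# Illustrative sketch (not part of the original module): the fast path above
# only fires when every argument keeps its default, so any non-default option
# (e.g. indent) builds a fresh JSONEncoder for that call.
def _example_dump_fast_path():
    from StringIO import StringIO
    buf = StringIO()
    dump({"a": 1}, buf)               # served by the cached _default_encoder
    dump({"a": 1}, buf, indent="  ")  # constructs a per-call JSONEncoder
    return buf.getvalue()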
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
**kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
    If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
    rounding that happens in JavaScript otherwise.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
    in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
    Note that currently only encodings that are a superset of ASCII work;
    strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
    Note that currently only encodings that are a superset of ASCII work;
    strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
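# Illustrative sketch (not part of the original module): mirrors the decode
# fast path above; passing any hook bypasses the cached _default_decoder.
def _example_loads_hooks():
    plain = loads('{"n": 1}')                                   # cached decoder
    ordered = loads('{"n": 1}', object_pairs_hook=OrderedDict)  # per-call decoder
    return plain, ordered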
def _toggle_speedups(enabled):
import simplejson.decoder as dec
import simplejson.encoder as enc
import simplejson.scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def simple_first(kv):
"""Helper function to pass to item_sort_key to sort simple
elements to the top, then container elements.
"""
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
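# Illustrative sketch (not part of the original module): simple_first() is
# meant to be passed as item_sort_key so scalar members are emitted before
# containers.
def _example_simple_first():
    return dumps({"z": 1, "a": [2]}, item_sort_key=simple_first)
    # -> '{"z": 1, "a": [2]}'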
|
ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/scipy/signal/_savitzky_golay.py | 20 | from __future__ import division, print_function, absolute_import
import numpy as np
from scipy.linalg import lstsq
from math import factorial
from scipy.ndimage import convolve1d
from ._arraytools import axis_slice
def savgol_coeffs(window_length, polyorder, deriv=0, delta=1.0, pos=None,
use="conv"):
"""Compute the coefficients for a 1-d Savitzky-Golay FIR filter.
Parameters
----------
window_length : int
The length of the filter window (i.e. the number of coefficients).
`window_length` must be an odd positive integer.
polyorder : int
The order of the polynomial used to fit the samples.
`polyorder` must be less than `window_length`.
deriv : int, optional
The order of the derivative to compute. This must be a
nonnegative integer. The default is 0, which means to filter
the data without differentiating.
delta : float, optional
The spacing of the samples to which the filter will be applied.
This is only used if deriv > 0.
pos : int or None, optional
        If pos is not None, it specifies the evaluation position within the
window. The default is the middle of the window.
use : str, optional
Either 'conv' or 'dot'. This argument chooses the order of the
coefficients. The default is 'conv', which means that the
coefficients are ordered to be used in a convolution. With
use='dot', the order is reversed, so the filter is applied by
dotting the coefficients with the data set.
Returns
-------
coeffs : 1-d ndarray
The filter coefficients.
References
----------
A. Savitzky, M. J. E. Golay, Smoothing and Differentiation of Data by
Simplified Least Squares Procedures. Analytical Chemistry, 1964, 36 (8),
pp 1627-1639.
See Also
--------
savgol_filter
Notes
-----
.. versionadded:: 0.14.0
Examples
--------
>>> from scipy.signal import savgol_coeffs
>>> savgol_coeffs(5, 2)
array([-0.08571429, 0.34285714, 0.48571429, 0.34285714, -0.08571429])
>>> savgol_coeffs(5, 2, deriv=1)
array([ 2.00000000e-01, 1.00000000e-01, 2.00607895e-16,
-1.00000000e-01, -2.00000000e-01])
Note that use='dot' simply reverses the coefficients.
>>> savgol_coeffs(5, 2, pos=3)
array([ 0.25714286, 0.37142857, 0.34285714, 0.17142857, -0.14285714])
>>> savgol_coeffs(5, 2, pos=3, use='dot')
array([-0.14285714, 0.17142857, 0.34285714, 0.37142857, 0.25714286])
`x` contains data from the parabola x = t**2, sampled at
t = -1, 0, 1, 2, 3. `c` holds the coefficients that will compute the
derivative at the last position. When dotted with `x` the result should
be 6.
>>> x = np.array([1, 0, 1, 4, 9])
>>> c = savgol_coeffs(5, 2, pos=4, deriv=1, use='dot')
>>> c.dot(x)
6.0000000000000018
"""
# An alternative method for finding the coefficients when deriv=0 is
# t = np.arange(window_length)
# unit = (t == pos).astype(int)
# coeffs = np.polyval(np.polyfit(t, unit, polyorder), t)
# The method implemented here is faster.
# To recreate the table of sample coefficients shown in the chapter on
    # the Savitzky-Golay filter in the Numerical Recipes book, use
# window_length = nL + nR + 1
# pos = nL + 1
# c = savgol_coeffs(window_length, M, pos=pos, use='dot')
if polyorder >= window_length:
raise ValueError("polyorder must be less than window_length.")
halflen, rem = divmod(window_length, 2)
if rem == 0:
raise ValueError("window_length must be odd.")
if pos is None:
pos = halflen
if not (0 <= pos < window_length):
raise ValueError("pos must be nonnegative and less than "
"window_length.")
if use not in ['conv', 'dot']:
raise ValueError("`use` must be 'conv' or 'dot'")
# Form the design matrix A. The columns of A are powers of the integers
# from -pos to window_length - pos - 1. The powers (i.e. rows) range
    # from 0 to polyorder. (That is, A is a Vandermonde matrix, but not
# necessarily square.)
x = np.arange(-pos, window_length - pos, dtype=float)
if use == "conv":
# Reverse so that result can be used in a convolution.
x = x[::-1]
order = np.arange(polyorder + 1).reshape(-1, 1)
A = x ** order
# y determines which order derivative is returned.
y = np.zeros(polyorder + 1)
# The coefficient assigned to y[deriv] scales the result to take into
# account the order of the derivative and the sample spacing.
y[deriv] = factorial(deriv) / (delta ** deriv)
# Find the least-squares solution of A*c = y
coeffs, _, _, _ = lstsq(A, y)
return coeffs
def _polyder(p, m):
"""Differentiate polynomials represented with coefficients.
p must be a 1D or 2D array. In the 2D case, each column gives
the coefficients of a polynomial; the first row holds the coefficients
associated with the highest power. m must be a nonnegative integer.
(numpy.polyder doesn't handle the 2D case.)
"""
if m == 0:
result = p
else:
n = len(p)
if n <= m:
result = np.zeros_like(p[:1, ...])
else:
dp = p[:-m].copy()
for k in range(m):
rng = np.arange(n - k - 1, m - k - 1, -1)
dp *= rng.reshape((n - m,) + (1,) * (p.ndim - 1))
result = dp
return result
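# Illustrative sketch (not part of the original module): _polyder mirrors
# numpy.polyder column-wise. For the cubic p(t) = t**3 stored highest power
# first, the second derivative is 6*t.
def _example_polyder():
    p = np.array([[1.0], [0.0], [0.0], [0.0]])  # t**3 as a single column
    return _polyder(p, 2)                       # array([[6.], [0.]]), i.e. 6*t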
def _fit_edge(x, window_start, window_stop, interp_start, interp_stop,
axis, polyorder, deriv, delta, y):
"""
Given an n-d array `x` and the specification of a slice of `x` from
`window_start` to `window_stop` along `axis`, create an interpolating
polynomial of each 1-d slice, and evaluate that polynomial in the slice
from `interp_start` to `interp_stop`. Put the result into the
corresponding slice of `y`.
"""
# Get the edge into a (window_length, -1) array.
x_edge = axis_slice(x, start=window_start, stop=window_stop, axis=axis)
if axis == 0 or axis == -x.ndim:
xx_edge = x_edge
swapped = False
else:
xx_edge = x_edge.swapaxes(axis, 0)
swapped = True
xx_edge = xx_edge.reshape(xx_edge.shape[0], -1)
# Fit the edges. poly_coeffs has shape (polyorder + 1, -1),
# where '-1' is the same as in xx_edge.
poly_coeffs = np.polyfit(np.arange(0, window_stop - window_start),
xx_edge, polyorder)
if deriv > 0:
poly_coeffs = _polyder(poly_coeffs, deriv)
# Compute the interpolated values for the edge.
i = np.arange(interp_start - window_start, interp_stop - window_start)
values = np.polyval(poly_coeffs, i.reshape(-1, 1)) / (delta ** deriv)
# Now put the values into the appropriate slice of y.
# First reshape values to match y.
shp = list(y.shape)
shp[0], shp[axis] = shp[axis], shp[0]
values = values.reshape(interp_stop - interp_start, *shp[1:])
if swapped:
values = values.swapaxes(0, axis)
# Get a view of the data to be replaced by values.
y_edge = axis_slice(y, start=interp_start, stop=interp_stop, axis=axis)
y_edge[...] = values
def _fit_edges_polyfit(x, window_length, polyorder, deriv, delta, axis, y):
"""
Use polynomial interpolation of x at the low and high ends of the axis
to fill in the halflen values in y.
This function just calls _fit_edge twice, once for each end of the axis.
"""
halflen = window_length // 2
_fit_edge(x, 0, window_length, 0, halflen, axis,
polyorder, deriv, delta, y)
n = x.shape[axis]
_fit_edge(x, n - window_length, n, n - halflen, n, axis,
polyorder, deriv, delta, y)
def savgol_filter(x, window_length, polyorder, deriv=0, delta=1.0,
axis=-1, mode='interp', cval=0.0):
""" Apply a Savitzky-Golay filter to an array.
This is a 1-d filter. If `x` has dimension greater than 1, `axis`
determines the axis along which the filter is applied.
Parameters
----------
x : array_like
The data to be filtered. If `x` is not a single or double precision
floating point array, it will be converted to type `numpy.float64`
before filtering.
window_length : int
The length of the filter window (i.e. the number of coefficients).
`window_length` must be a positive odd integer.
polyorder : int
The order of the polynomial used to fit the samples.
`polyorder` must be less than `window_length`.
deriv : int, optional
The order of the derivative to compute. This must be a
nonnegative integer. The default is 0, which means to filter
the data without differentiating.
delta : float, optional
The spacing of the samples to which the filter will be applied.
This is only used if deriv > 0. Default is 1.0.
axis : int, optional
The axis of the array `x` along which the filter is to be applied.
Default is -1.
mode : str, optional
Must be 'mirror', 'constant', 'nearest', 'wrap' or 'interp'. This
determines the type of extension to use for the padded signal to
which the filter is applied. When `mode` is 'constant', the padding
value is given by `cval`. See the Notes for more details on 'mirror',
'constant', 'wrap', and 'nearest'.
When the 'interp' mode is selected (the default), no extension
is used. Instead, a degree `polyorder` polynomial is fit to the
last `window_length` values of the edges, and this polynomial is
used to evaluate the last `window_length // 2` output values.
cval : scalar, optional
Value to fill past the edges of the input if `mode` is 'constant'.
Default is 0.0.
Returns
-------
y : ndarray, same shape as `x`
The filtered data.
See Also
--------
savgol_coeffs
Notes
-----
Details on the `mode` options:
'mirror':
Repeats the values at the edges in reverse order. The value
closest to the edge is not included.
'nearest':
The extension contains the nearest input value.
'constant':
The extension contains the value given by the `cval` argument.
'wrap':
The extension contains the values from the other end of the array.
For example, if the input is [1, 2, 3, 4, 5, 6, 7, 8], and
`window_length` is 7, the following shows the extended data for
the various `mode` options (assuming `cval` is 0)::
mode | Ext | Input | Ext
-----------+---------+------------------------+---------
'mirror' | 4 3 2 | 1 2 3 4 5 6 7 8 | 7 6 5
'nearest' | 1 1 1 | 1 2 3 4 5 6 7 8 | 8 8 8
'constant' | 0 0 0 | 1 2 3 4 5 6 7 8 | 0 0 0
'wrap' | 6 7 8 | 1 2 3 4 5 6 7 8 | 1 2 3
.. versionadded:: 0.14.0
Examples
--------
>>> from scipy.signal import savgol_filter
>>> np.set_printoptions(precision=2) # For compact display.
>>> x = np.array([2, 2, 5, 2, 1, 0, 1, 4, 9])
Filter with a window length of 5 and a degree 2 polynomial. Use
the defaults for all other parameters.
>>> savgol_filter(x, 5, 2)
array([ 1.66, 3.17, 3.54, 2.86, 0.66, 0.17, 1. , 4. , 9. ])
Note that the last five values in x are samples of a parabola, so
when mode='interp' (the default) is used with polyorder=2, the last
three values are unchanged. Compare that to, for example,
`mode='nearest'`:
>>> savgol_filter(x, 5, 2, mode='nearest')
array([ 1.74, 3.03, 3.54, 2.86, 0.66, 0.17, 1. , 4.6 , 7.97])
"""
if mode not in ["mirror", "constant", "nearest", "interp", "wrap"]:
raise ValueError("mode must be 'mirror', 'constant', 'nearest' "
"'wrap' or 'interp'.")
x = np.asarray(x)
# Ensure that x is either single or double precision floating point.
if x.dtype != np.float64 and x.dtype != np.float32:
x = x.astype(np.float64)
coeffs = savgol_coeffs(window_length, polyorder, deriv=deriv, delta=delta)
if mode == "interp":
# Do not pad. Instead, for the elements within `window_length // 2`
# of the ends of the sequence, use the polynomial that is fitted to
# the last `window_length` elements.
y = convolve1d(x, coeffs, axis=axis, mode="constant")
_fit_edges_polyfit(x, window_length, polyorder, deriv, delta, axis, y)
else:
# Any mode other than 'interp' is passed on to ndimage.convolve1d.
y = convolve1d(x, coeffs, axis=axis, mode=mode, cval=cval)
return y
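# Illustrative sketch (not part of the original module): with deriv=1 the
# result is divided by delta, so the sample spacing below rescales the
# estimated slope back to data units.
def _example_savgol_derivative():
    t = np.linspace(0, 1, 11)    # spacing delta = 0.1
    x = 3 * t                    # a line with slope 3
    return savgol_filter(x, 5, 2, deriv=1, delta=0.1)  # ~3.0 everywhere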
|
arthru/OpenUpgrade | refs/heads/master | addons/account/report/report_vat.py | 93 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
from common_report_header import common_report_header
class tax_report(report_sxw.rml_parse, common_report_header):
def set_context(self, objects, data, ids, report_type=None):
new_ids = ids
res = {}
self.period_ids = []
period_obj = self.pool.get('account.period')
self.display_detail = data['form']['display_detail']
res['periods'] = ''
res['fiscalyear'] = data['form'].get('fiscalyear_id', False)
if data['form'].get('period_from', False) and data['form'].get('period_to', False):
self.period_ids = period_obj.build_ctx_periods(self.cr, self.uid, data['form']['period_from'], data['form']['period_to'])
periods_l = period_obj.read(self.cr, self.uid, self.period_ids, ['name'])
for period in periods_l:
if res['periods'] == '':
res['periods'] = period['name']
else:
res['periods'] += ", "+ period['name']
return super(tax_report, self).set_context(objects, data, new_ids, report_type=report_type)
def __init__(self, cr, uid, name, context=None):
super(tax_report, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'get_codes': self._get_codes,
'get_general': self._get_general,
'get_currency': self._get_currency,
'get_lines': self._get_lines,
'get_fiscalyear': self._get_fiscalyear,
'get_account': self._get_account,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_basedon': self._get_basedon,
})
def _get_basedon(self, form):
return form['form']['based_on']
def _get_lines(self, based_on, company_id=False, parent=False, level=0, context=None):
period_list = self.period_ids
res = self._get_codes(based_on, company_id, parent, level, period_list, context=context)
if period_list:
res = self._add_codes(based_on, res, period_list, context=context)
else:
self.cr.execute ("select id from account_fiscalyear")
fy = self.cr.fetchall()
self.cr.execute ("select id from account_period where fiscalyear_id = %s",(fy[0][0],))
periods = self.cr.fetchall()
for p in periods:
period_list.append(p[0])
res = self._add_codes(based_on, res, period_list, context=context)
i = 0
top_result = []
while i < len(res):
res_dict = { 'code': res[i][1].code,
'name': res[i][1].name,
'debit': 0,
'credit': 0,
'tax_amount': res[i][1].sum_period,
'type': 1,
'level': res[i][0],
'pos': 0
}
top_result.append(res_dict)
res_general = self._get_general(res[i][1].id, period_list, company_id, based_on, context=context)
ind_general = 0
while ind_general < len(res_general):
res_general[ind_general]['type'] = 2
res_general[ind_general]['pos'] = 0
res_general[ind_general]['level'] = res_dict['level']
top_result.append(res_general[ind_general])
ind_general+=1
i+=1
return top_result
def _get_general(self, tax_code_id, period_list, company_id, based_on, context=None):
if not self.display_detail:
return []
res = []
obj_account = self.pool.get('account.account')
periods_ids = tuple(period_list)
if based_on == 'payments':
self.cr.execute('SELECT SUM(line.tax_amount) AS tax_amount, \
SUM(line.debit) AS debit, \
SUM(line.credit) AS credit, \
COUNT(*) AS count, \
account.id AS account_id, \
account.name AS name, \
account.code AS code \
FROM account_move_line AS line, \
account_account AS account, \
account_move AS move \
LEFT JOIN account_invoice invoice ON \
(invoice.move_id = move.id) \
WHERE line.state<>%s \
AND line.tax_code_id = %s \
AND line.account_id = account.id \
AND account.company_id = %s \
AND move.id = line.move_id \
AND line.period_id IN %s \
AND ((invoice.state = %s) \
OR (invoice.id IS NULL)) \
GROUP BY account.id,account.name,account.code', ('draft', tax_code_id,
company_id, periods_ids, 'paid',))
else:
self.cr.execute('SELECT SUM(line.tax_amount) AS tax_amount, \
SUM(line.debit) AS debit, \
SUM(line.credit) AS credit, \
COUNT(*) AS count, \
account.id AS account_id, \
account.name AS name, \
account.code AS code \
FROM account_move_line AS line, \
account_account AS account \
WHERE line.state <> %s \
AND line.tax_code_id = %s \
AND line.account_id = account.id \
AND account.company_id = %s \
AND line.period_id IN %s\
AND account.active \
GROUP BY account.id,account.name,account.code', ('draft', tax_code_id,
company_id, periods_ids,))
res = self.cr.dictfetchall()
        for r in res:
            r['account'] = obj_account.browse(self.cr, self.uid, r['account_id'], context=context)
return res
def _get_codes(self, based_on, company_id, parent=False, level=0, period_list=None, context=None):
obj_tc = self.pool.get('account.tax.code')
ids = obj_tc.search(self.cr, self.uid, [('parent_id','=',parent),('company_id','=',company_id)], order='sequence', context=context)
res = []
for code in obj_tc.browse(self.cr, self.uid, ids, {'based_on': based_on}):
res.append(('.'*2*level, code))
res += self._get_codes(based_on, company_id, code.id, level+1, context=context)
return res
def _add_codes(self, based_on, account_list=None, period_list=None, context=None):
if account_list is None:
account_list = []
if period_list is None:
period_list = []
res = []
obj_tc = self.pool.get('account.tax.code')
for account in account_list:
ids = obj_tc.search(self.cr, self.uid, [('id','=', account[1].id)], context=context)
sum_tax_add = 0
for period_ind in period_list:
for code in obj_tc.browse(self.cr, self.uid, ids, {'period_id':period_ind,'based_on': based_on}):
sum_tax_add = sum_tax_add + code.sum_period
code.sum_period = sum_tax_add
res.append((account[0], code))
return res
def _get_currency(self, form, context=None):
return self.pool.get('res.company').browse(self.cr, self.uid, form['company_id'], context=context).currency_id.name
def sort_result(self, accounts, context=None):
        # Loop over the report lines
result_accounts = []
        ind = 0
        old_level = 0
        while ind < len(accounts):
            account_elem = accounts[ind]
# we will now check if the level is lower than the previous level, in this case we will make a subtotal
if (account_elem['level'] < old_level):
bcl_current_level = old_level
bcl_rup_ind = ind - 1
while (bcl_current_level >= int(accounts[bcl_rup_ind]['level']) and bcl_rup_ind >= 0 ):
res_tot = { 'code': accounts[bcl_rup_ind]['code'],
'name': '',
'debit': 0,
'credit': 0,
'tax_amount': accounts[bcl_rup_ind]['tax_amount'],
'type': accounts[bcl_rup_ind]['type'],
'level': 0,
'pos': 0
}
if res_tot['type'] == 1:
                        # change the type to display the subtotal row
res_tot['type'] = 2
result_accounts.append(res_tot)
bcl_current_level = accounts[bcl_rup_ind]['level']
bcl_rup_ind -= 1
old_level = account_elem['level']
result_accounts.append(account_elem)
ind+=1
return result_accounts
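# Illustrative sketch (not part of the original report): sort_result() emits
# a type-2 subtotal row whenever the level drops, as this plain-dict
# walkthrough (with assumed values) shows.
def _example_sort_result(report):
    rows = [
        {'code': 'T', 'name': 'Tax', 'level': 0, 'type': 1, 'tax_amount': 10,
         'debit': 0, 'credit': 0, 'pos': 0},
        {'code': 'T1', 'name': 'Child', 'level': 1, 'type': 1, 'tax_amount': 10,
         'debit': 0, 'credit': 0, 'pos': 0},
        {'code': 'U', 'name': 'Other', 'level': 0, 'type': 1, 'tax_amount': 5,
         'debit': 0, 'credit': 0, 'pos': 0},
    ]
    return report.sort_result(rows)  # subtotal rows for 'T1' and 'T' precede 'U'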
class report_vat(osv.AbstractModel):
_name = 'report.account.report_vat'
_inherit = 'report.abstract_report'
_template = 'account.report_vat'
_wrapped_report_class = tax_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
w1ll1am23/home-assistant | refs/heads/dev | tests/components/mochad/test_light.py | 8 | """The tests for the mochad light platform."""
import unittest.mock as mock
import pytest
from homeassistant.components import light
from homeassistant.components.mochad import light as mochad
from homeassistant.setup import async_setup_component
@pytest.fixture(autouse=True)
def pymochad_mock():
"""Mock pymochad."""
with mock.patch("homeassistant.components.mochad.light.device") as device:
yield device
@pytest.fixture
def light_mock(hass, brightness):
"""Mock light."""
controller_mock = mock.MagicMock()
dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness}
return mochad.MochadLight(hass, controller_mock, dev_dict)
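# Illustrative note (assumption, not part of the original tests): the
# `brightness` argument of the fixture above is supplied indirectly by
# @pytest.mark.parametrize on each test, so every parametrized case builds a
# MochadLight with its own brightness_levels value.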
async def test_setup_adds_proper_devices(hass):
"""Test if setup adds devices."""
good_config = {
"mochad": {},
"light": {
"platform": "mochad",
"devices": [{"name": "Light1", "address": "a1"}],
},
}
assert await async_setup_component(hass, light.DOMAIN, good_config)
@pytest.mark.parametrize(
"brightness,expected", [(32, "on"), (256, "xdim 255"), (64, "xdim 63")]
)
async def test_turn_on_with_no_brightness(light_mock, expected):
"""Test turn_on."""
light_mock.turn_on()
light_mock.light.send_cmd.assert_called_once_with(expected)
@pytest.mark.parametrize(
"brightness,expected",
[
(32, [mock.call("on"), mock.call("dim 25")]),
(256, [mock.call("xdim 45")]),
(64, [mock.call("xdim 11")]),
],
)
async def test_turn_on_with_brightness(light_mock, expected):
"""Test turn_on."""
light_mock.turn_on(brightness=45)
light_mock.light.send_cmd.assert_has_calls(expected)
@pytest.mark.parametrize("brightness", [32])
async def test_turn_off(light_mock):
"""Test turn_off."""
light_mock.turn_off()
light_mock.light.send_cmd.assert_called_once_with("off")
|
zorojean/pyspider | refs/heads/master | pyspider/database/sqlalchemy/__init__.py | 82 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-12-04 20:11:04
|
h3biomed/ansible | refs/heads/h3 | test/units/modules/source_control/test_bitbucket_pipeline_known_host.py | 26 | import pytest
from ansible.module_utils.source_control.bitbucket import BitbucketHelper
from ansible.modules.source_control.bitbucket import bitbucket_pipeline_known_host
from ansible.modules.source_control.bitbucket.bitbucket_pipeline_known_host import HAS_PARAMIKO
from units.compat import unittest
from units.compat.mock import patch
from units.modules.utils import AnsibleExitJson, ModuleTestCase, set_module_args
class TestBucketPipelineKnownHostModule(ModuleTestCase):
def setUp(self):
super(TestBucketPipelineKnownHostModule, self).setUp()
self.module = bitbucket_pipeline_known_host
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value=None)
def test_create_known_host(self, *args):
with patch.object(self.module, 'create_known_host') as create_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'present',
})
self.module.main()
self.assertEqual(create_known_host_mock.call_count, 1)
self.assertEqual(exec_info.exception.args[0]['changed'], True)
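    # Illustrative note (assumption, not part of the original tests): Ansible
    # modules exit by raising; set_module_args() primes the parameters and
    # main() raises AnsibleExitJson, which is why every case below asserts
    # inside assertRaises and then inspects exec_info.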
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(BitbucketHelper, 'request', return_value=(dict(status=201), dict()))
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value=None)
def test_create_known_host_with_key(self, *args):
with patch.object(self.module, 'get_host_key') as get_host_key_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'key': 'ssh-rsa public',
'state': 'present',
})
self.module.main()
self.assertEqual(get_host_key_mock.call_count, 0)
self.assertEqual(exec_info.exception.args[0]['changed'], True)
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value={
'type': 'pipeline_known_host',
'uuid': '{21cc0590-bebe-4fae-8baf-03722704119a7}',
'hostname': 'bitbucket.org',
'public_key': {
'type': 'pipeline_ssh_public_key',
'md5_fingerprint': 'md5:97:8c:1b:f2:6f:14:6b:4b:3b:ec:aa:46:46:74:7c:40',
'sha256_fingerprint': 'SHA256:zzXQOXSFBEiUtuE8AikoYKwbHaxvSc0ojez9YXaGp1A',
'key_type': 'ssh-rsa',
'key': 'AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kN...seeFVBoGqzHM9yXw=='
}
})
def test_dont_create_same_value(self, *args):
with patch.object(self.module, 'create_known_host') as create_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'present',
})
self.module.main()
self.assertEqual(create_known_host_mock.call_count, 0)
self.assertEqual(exec_info.exception.args[0]['changed'], False)
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value=None)
def test_create_known_host_check_mode(self, *args):
with patch.object(self.module, 'create_known_host') as create_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'present',
'_ansible_check_mode': True,
})
self.module.main()
self.assertEqual(create_known_host_mock.call_count, 0)
self.assertEqual(exec_info.exception.args[0]['changed'], True)
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value={
'type': 'pipeline_known_host',
'uuid': '{21cc0590-bebe-4fae-8baf-03722704119a7}',
'hostname': 'bitbucket.org',
'public_key': {
'type': 'pipeline_ssh_public_key',
'md5_fingerprint': 'md5:97:8c:1b:f2:6f:14:6b:4b:3b:ec:aa:46:46:74:7c:40',
'sha256_fingerprint': 'SHA256:zzXQOXSFBEiUtuE8AikoYKwbHaxvSc0ojez9YXaGp1A',
'key_type': 'ssh-rsa',
'key': 'AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kN...seeFVBoGqzHM9yXw=='
}
})
def test_delete_known_host(self, *args):
with patch.object(self.module, 'delete_known_host') as delete_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'absent',
})
self.module.main()
self.assertEqual(delete_known_host_mock.call_count, 1)
self.assertEqual(exec_info.exception.args[0]['changed'], True)
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value=None)
def test_delete_absent_known_host(self, *args):
with patch.object(self.module, 'delete_known_host') as delete_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'absent',
})
self.module.main()
self.assertEqual(delete_known_host_mock.call_count, 0)
self.assertEqual(exec_info.exception.args[0]['changed'], False)
@pytest.mark.skipif(not HAS_PARAMIKO, reason='paramiko must be installed to test key creation')
@patch.object(BitbucketHelper, 'fetch_access_token', return_value='token')
@patch.object(bitbucket_pipeline_known_host, 'get_existing_known_host', return_value={
'type': 'pipeline_known_host',
'uuid': '{21cc0590-bebe-4fae-8baf-03722704119a7}',
'hostname': 'bitbucket.org',
'public_key': {
'type': 'pipeline_ssh_public_key',
'md5_fingerprint': 'md5:97:8c:1b:f2:6f:14:6b:4b:3b:ec:aa:46:46:74:7c:40',
'sha256_fingerprint': 'SHA256:zzXQOXSFBEiUtuE8AikoYKwbHaxvSc0ojez9YXaGp1A',
'key_type': 'ssh-rsa',
'key': 'AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kN...seeFVBoGqzHM9yXw=='
}
})
def test_delete_known_host_check_mode(self, *args):
with patch.object(self.module, 'delete_known_host') as delete_known_host_mock:
with self.assertRaises(AnsibleExitJson) as exec_info:
set_module_args({
'client_id': 'ABC',
'client_secret': 'XXX',
'username': 'name',
'repository': 'repo',
'name': 'bitbucket.org',
'state': 'absent',
'_ansible_check_mode': True,
})
self.module.main()
self.assertEqual(delete_known_host_mock.call_count, 0)
self.assertEqual(exec_info.exception.args[0]['changed'], True)
if __name__ == '__main__':
unittest.main()
|
PeterDaveHello/ShadowVPN | refs/heads/master | tools/gen_foreign_sh.py | 169 | #!/usr/bin/env python3
#
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from ipaddress import ip_network
import sys
print('''#!/bin/sh
tun=tun0
add_or_delete=add
if [ "$1" == "down" ] || [ "$1" == "del" ]; then
add_or_delete=del
fi
''')
for line in sys.stdin:
line = line.strip()
if not line:
continue
elif line.startswith('#'):
continue
subnet = ip_network(line)
print('route $add_or_delete -net %s netmask %s $tun' %
(subnet.network_address, subnet.netmask))
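# Illustrative sketch (not part of the original script): what a single input
# line expands to, assuming "8.8.8.0/24" arrives on stdin:
#   route $add_or_delete -net 8.8.8.0 netmask 255.255.255.0 $tun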
|
dulems/hue | refs/heads/master | desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/pipchecker.py | 35 | import os
import pip
import sys
import json
from distutils.version import LooseVersion
from django.core.management.base import NoArgsCommand
from django_extensions.management.color import color_style
from optparse import make_option
from pip.req import parse_requirements
from django_extensions.management.utils import signalcommand
try:
from urllib.parse import urlparse
from urllib.error import HTTPError
from urllib.request import Request, urlopen
from xmlrpc.client import ServerProxy
except ImportError:
# Python 2
from urlparse import urlparse
from urllib2 import HTTPError, Request, urlopen
from xmlrpclib import ServerProxy
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option(
"-t", "--github-api-token", action="store", dest="github_api_token",
help="A github api authentication token."
),
make_option(
"-r", "--requirement", action="append", dest="requirements",
default=[], metavar="FILENAME",
help="Check all the packages listed in the given requirements file. "
"This option can be used multiple times."
),
make_option(
"-n", "--newer", action="store_true", dest="show_newer",
help="Also show when newer version then available is installed."
),
)
help = "Scan pip requirement files for out-of-date packages."
@signalcommand
def handle_noargs(self, **options):
self.style = color_style()
self.options = options
if options["requirements"]:
req_files = options["requirements"]
elif os.path.exists("requirements.txt"):
req_files = ["requirements.txt"]
elif os.path.exists("requirements"):
req_files = ["requirements/{0}".format(f) for f in os.listdir("requirements")
if os.path.isfile(os.path.join("requirements", f)) and
f.lower().endswith(".txt")]
else:
sys.exit("requirements not found")
self.reqs = {}
for filename in req_files:
class Object(object):
pass
mockoptions = Object()
mockoptions.default_vcs = "git"
mockoptions.skip_requirements_regex = None
for req in parse_requirements(filename, options=mockoptions):
self.reqs[req.name] = {
"pip_req": req,
"url": req.url,
}
if options["github_api_token"]:
self.github_api_token = options["github_api_token"]
elif os.environ.get("GITHUB_API_TOKEN"):
self.github_api_token = os.environ.get("GITHUB_API_TOKEN")
else:
self.github_api_token = None # only 50 requests per hour
self.check_pypi()
if HAS_REQUESTS:
self.check_github()
else:
print(self.style.ERROR("Cannot check github urls. The requests library is not installed. ( pip install requests )"))
self.check_other()
def _urlopen_as_json(self, url, headers=None):
"""Shorcut for return contents as json"""
req = Request(url, headers=headers)
return json.loads(urlopen(req).read())
def check_pypi(self):
"""
If the requirement is frozen to pypi, check for a new version.
"""
for dist in pip.get_installed_distributions():
name = dist.project_name
if name in self.reqs.keys():
self.reqs[name]["dist"] = dist
pypi = ServerProxy("http://pypi.python.org/pypi")
for name, req in list(self.reqs.items()):
if req["url"]:
continue # skipping github packages.
elif "dist" in req:
dist = req["dist"]
dist_version = LooseVersion(dist.version)
available = pypi.package_releases(req["pip_req"].url_name)
try:
available_version = LooseVersion(available[0])
except IndexError:
available_version = None
if not available_version:
msg = self.style.WARN("release is not on pypi (check capitalization and/or --extra-index-url)")
elif self.options['show_newer'] and dist_version > available_version:
msg = self.style.INFO("{0} available (newer installed)".format(available_version))
elif available_version > dist_version:
msg = self.style.INFO("{0} available".format(available_version))
else:
msg = "up to date"
del self.reqs[name]
continue
pkg_info = self.style.BOLD("{dist.project_name} {dist.version}".format(dist=dist))
else:
msg = "not installed"
pkg_info = name
print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
del self.reqs[name]
def check_github(self):
"""
If the requirement is frozen to a github url, check for new commits.
API Tokens
----------
For more than 50 github api calls per hour, pipchecker requires
        authentication with the github api by setting the environment
        variable ``GITHUB_API_TOKEN`` or setting the command flag
        ``--github-api-token='mytoken'``.
To create a github api token for use at the command line::
curl -u 'rizumu' -d '{"scopes":["repo"], "note":"pipchecker"}' https://api.github.com/authorizations
For more info on github api tokens:
https://help.github.com/articles/creating-an-oauth-token-for-command-line-use
http://developer.github.com/v3/oauth/#oauth-authorizations-api
Requirement Format
------------------
Pipchecker gets the sha of frozen repo and checks if it is
found at the head of any branches. If it is not found then
the requirement is considered to be out of date.
Therefore, freezing at the commit hash will provide the expected
results, but if freezing at a branch or tag name, pipchecker will
not be able to determine with certainty if the repo is out of date.
Freeze at the commit hash (sha)::
git+git://github.com/django/django.git@393c268e725f5b229ecb554f3fac02cfc250d2df#egg=Django
Freeze with a branch name::
git+git://github.com/django/django.git@master#egg=Django
Freeze with a tag::
git+git://github.com/django/[email protected]#egg=Django
Do not freeze::
git+git://github.com/django/django.git#egg=Django
"""
for name, req in list(self.reqs.items()):
req_url = req["url"]
if not req_url:
continue
if req_url.startswith("git") and "github.com/" not in req_url:
continue
if req_url.endswith(".tar.gz") or req_url.endswith(".tar.bz2") or req_url.endswith(".zip"):
continue
headers = {
"content-type": "application/json",
}
if self.github_api_token:
headers["Authorization"] = "token {0}".format(self.github_api_token)
try:
user, repo = urlparse(req_url).path.split("#")[0].strip("/").rstrip("/").split("/")
except (ValueError, IndexError) as e:
print(self.style.ERROR("\nFailed to parse %r: %s\n" % (req_url, e)))
continue
try:
#test_auth = self._urlopen_as_json("https://api.github.com/django/", headers=headers)
test_auth = requests.get("https://api.github.com/django/", headers=headers).json()
except HTTPError as e:
print("\n%s\n" % str(e))
return
if "message" in test_auth and test_auth["message"] == "Bad credentials":
print(self.style.ERROR("\nGithub API: Bad credentials. Aborting!\n"))
return
elif "message" in test_auth and test_auth["message"].startswith("API Rate Limit Exceeded"):
print(self.style.ERROR("\nGithub API: Rate Limit Exceeded. Aborting!\n"))
return
frozen_commit_sha = None
if ".git" in repo:
repo_name, frozen_commit_full = repo.split(".git")
if frozen_commit_full.startswith("@"):
frozen_commit_sha = frozen_commit_full[1:]
elif "@" in repo:
repo_name, frozen_commit_sha = repo.split("@")
if frozen_commit_sha is None:
msg = self.style.ERROR("repo is not frozen")
if frozen_commit_sha:
branch_url = "https://api.github.com/repos/{0}/{1}/branches".format(user, repo_name)
#branch_data = self._urlopen_as_json(branch_url, headers=headers)
branch_data = requests.get(branch_url, headers=headers).json()
frozen_commit_url = "https://api.github.com/repos/{0}/{1}/commits/{2}".format(
user, repo_name, frozen_commit_sha
)
#frozen_commit_data = self._urlopen_as_json(frozen_commit_url, headers=headers)
frozen_commit_data = requests.get(frozen_commit_url, headers=headers).json()
if "message" in frozen_commit_data and frozen_commit_data["message"] == "Not Found":
msg = self.style.ERROR("{0} not found in {1}. Repo may be private.".format(frozen_commit_sha[:10], name))
elif frozen_commit_sha in [branch["commit"]["sha"] for branch in branch_data]:
msg = self.style.BOLD("up to date")
else:
msg = self.style.INFO("{0} is not the head of any branch".format(frozen_commit_data["sha"][:10]))
if "dist" in req:
pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
elif frozen_commit_sha is None:
pkg_info = name
else:
pkg_info = "{0} {1}".format(name, frozen_commit_sha[:10])
print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
del self.reqs[name]
def check_other(self):
"""
If the requirement is frozen somewhere other than pypi or github, skip.
If you have a private pypi or use --extra-index-url, consider contributing
support here.
"""
if self.reqs:
print(self.style.ERROR("\nOnly pypi and github based requirements are supported:"))
for name, req in self.reqs.items():
if "dist" in req:
pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
elif "url" in req:
pkg_info = "{url}".format(url=req["url"])
else:
pkg_info = "unknown package"
print(self.style.BOLD("{pkg_info:40} is not a pypi or github requirement".format(pkg_info=pkg_info)))
|
louisstow/dailygraphics | refs/heads/master | graphic_templates/slopegraph/graphic_config.py | 2 | #!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1-7MiVUVjD2cUhCPEpqc1YV_altBoPqTv-DGP4x8CM2o' # ABC copy
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
|
Subterfuge-Framework/Subterfuge | refs/heads/master | lib/dbmgr.py | 1 | import sys
import os
import sqlite3
#Setup system path
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
subdir = os.path.split(currentdir)[0]
sys.path.append(subdir)
DATABASE = os.path.join(subdir , 'attack.db')
print DATABASE
class dbmgr:
def __init__(self):
self.conn = sqlite3.connect(DATABASE, timeout=1)
self.conn.execute('pragma foreign_keys = on')
self.conn.commit()
self.cur = self.conn.cursor()
def getJobs(self):
#Get jobs from DB
self.cur.execute("SELECT * FROM Jobs")
return self.cur.fetchall()
def enableJob(self, jid):
#Set job status enabled
self.cur.execute("UPDATE Jobs SET Enabled = 1 WHERE ID = " + str(jid))
self.conn.commit()
#self.conn.close()
    def createJob(self, jobDict):
        #Build new job. Values are quoted and comma-separated; the original
        #concatenation joined them with spaces and never closed the VALUES
        #parenthesis, which produced invalid SQL.
        print jobDict
        self.cur.execute("INSERT INTO Jobs (Name, Active, Enabled, CmdString, Type, PID) values ('" + str(jobDict['Name']) + "','" + str(jobDict['Active']) + "','" + str(jobDict['Enabled']) + "','" + str(jobDict['CmdString']) + "','" + str(jobDict['Type']) + "','" + str(jobDict['PID']) + "')")
        self.conn.commit()
#Add source field to Loot table???
def logLoot(self, details, datetime):
#Set job status enabled
HID = "0"
new = "1"
self.cur.execute("INSERT INTO Loot(HID, Details, Datetime, New) values ('" + HID + "','" + details + "','" + datetime + "','" + new + "')")
self.conn.commit()
#self.conn.close()
def newChannel(self, hostid, details, tags):
self.cur.execute("insert into Channels (HostID, Details, Tags) values ('" + hostid + "','" + details + "','" + tags + "')")
self.conn.commit()
self.conn.close()
def getInteractions(self, channelid):
query = self.cur.execute("SELECT ID, command FROM Interactions WHERE channelid =" + channelid + " AND status = 0")
return query
    def LogCmdResponse(self, interactionid, response):
        #Update the interaction with its response and mark it handled.
        #(The original insert referenced undefined variables; this assumes
        #status 1 means "completed" in the Interactions table.)
        self.cur.execute("UPDATE Interactions SET response = '" + str(response) + "', status = 1 WHERE ID = " + str(interactionid))
        self.conn.commit()
        self.conn.close()
def kill(self):
self.conn.close()
def __del__(self):
#self.conn.close()
pass
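if __name__ == '__main__':
    #Minimal smoke test (assumes attack.db already exists with the expected
    #Jobs/Loot/Channels/Interactions schema)
    mgr = dbmgr()
    for job in mgr.getJobs():
        print job
    mgr.kill()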
|
wanasit/chrono-python | refs/heads/master | chrono/__init__.py | 1 | from chrono import Chrono
from chrono import parse
from chrono import parse_date
from parsed_result import ParsedResult
from parsed_result import ParsedComponent
from parsers.parser import Parser
from refiners.refiner import Refiner
from refiners.filter import Filter
import parsers
import refiners
|
cloudbau/nova | refs/heads/master | doc/ext/nova_todo.py | 68 | # -*- coding: utf-8 -*-
# This is a hack of the builtin todo extension, to make the todo_list
# more user friendly.
from sphinx.ext.todo import *
import re
def _(s):
return s
def process_todo_nodes(app, doctree, fromdocname):
if not app.config['todo_include_todos']:
for node in doctree.traverse(todo_node):
node.parent.remove(node)
# Replace all todolist nodes with a list of the collected todos.
# Augment each todo with a backlink to the original location.
env = app.builder.env
if not hasattr(env, 'todo_all_todos'):
env.todo_all_todos = []
# remove the item that was added in the constructor, since I'm tired of
# reading through docutils for the proper way to construct an empty list
lists = []
for i in xrange(5):
lists.append(nodes.bullet_list("", nodes.Text('', '')))
lists[i].remove(lists[i][0])
lists[i]['classes'].append('todo_list')
for node in doctree.traverse(todolist):
if not app.config['todo_include_todos']:
node.replace_self([])
continue
for todo_info in env.todo_all_todos:
para = nodes.paragraph()
# Create a reference
newnode = nodes.reference('', '')
filename = env.doc2path(todo_info['docname'], base=None)
link = (_('%(filename)s, line %(line_info)d') %
{'filename': filename, 'line_info': todo_info['lineno']})
innernode = nodes.emphasis(link, link)
newnode['refdocname'] = todo_info['docname']
try:
newnode['refuri'] = app.builder.get_relative_uri(
fromdocname, todo_info['docname'])
newnode['refuri'] += '#' + todo_info['target']['refid']
except NoUri:
# ignore if no URI can be determined, e.g. for LaTeX output
pass
newnode.append(innernode)
para += newnode
para['classes'].append('todo_link')
todo_entry = todo_info['todo']
env.resolve_references(todo_entry, todo_info['docname'],
app.builder)
item = nodes.list_item('', para)
todo_entry[1]['classes'].append('details')
comment = todo_entry[1]
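            # A todo whose text begins with "P<digit>" (e.g. "P1 fix encoding")
            # is given that priority; anything without such a prefix defaults
            # to priority 5.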
m = re.match(r"^P(\d)", comment.astext())
priority = 5
if m:
priority = int(m.group(1))
if priority < 0:
priority = 1
if priority > 5:
priority = 5
item['classes'].append('todo_p' + str(priority))
todo_entry['classes'].append('todo_p' + str(priority))
item.append(comment)
lists[priority - 1].insert(0, item)
node.replace_self(lists)
def setup(app):
app.add_config_value('todo_include_todos', False, False)
app.add_node(todolist)
app.add_node(todo_node,
html=(visit_todo_node, depart_todo_node),
latex=(visit_todo_node, depart_todo_node),
text=(visit_todo_node, depart_todo_node))
app.add_directive('todo', Todo)
app.add_directive('todolist', TodoList)
app.connect('doctree-read', process_todos)
app.connect('doctree-resolved', process_todo_nodes)
app.connect('env-purge-doc', purge_todos)
|
was4444/chromium.src | refs/heads/nw15 | third_party/WebKit/Tools/Scripts/webkitpy/common/system/stack_utils.py | 215 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple routines for logging, obtaining thread stack information."""
import sys
import traceback
def log_thread_state(logger, name, thread_id, msg=''):
"""Log information about the given thread state."""
stack = _find_thread_stack(thread_id)
assert(stack is not None)
logger("")
logger("%s (tid %d) %s" % (name, thread_id, msg))
_log_stack(logger, stack)
logger("")
def _find_thread_stack(thread_id):
"""Returns a stack object that can be used to dump a stack trace for
the given thread id (or None if the id is not found)."""
for tid, stack in sys._current_frames().items():
if tid == thread_id:
return stack
return None
def _log_stack(logger, stack):
"""Log a stack trace to the logger callback."""
for filename, lineno, name, line in traceback.extract_stack(stack):
logger('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
logger(' %s' % line.strip())
def log_traceback(logger, tb):
stack = traceback.extract_tb(tb)
for frame_str in traceback.format_list(stack):
for line in frame_str.split('\n'):
if line:
logger(" %s" % line)
|
varlib1/servermall | refs/heads/master | tcp_check/test_tcp_check.py | 3 | # (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
import time
# 3p
from nose.plugins.attrib import attr
# project
from tests.checks.common import AgentCheckTest
RESULTS_TIMEOUT = 40
CONFIG = {
'init_config': {},
'instances': [{
'host': '127.0.0.1',
'port': 65530,
'timeout': 1.5,
'name': 'DownService',
'skip_event': True
}, {
'host': '126.0.0.1',
'port': 65530,
'timeout': 1.5,
'name': 'DownService2',
'tags': ['test1'],
'skip_event': True
}, {
'host': 'datadoghq.com',
'port': 80,
'timeout': 1.5,
'name': 'UpService',
'tags': ['test2'],
'skip_event': True
}, {
'host': 'datadoghq.com',
'port': 80,
'timeout': 1,
'name': 'response_time',
'tags': ['test3'],
'collect_response_time': True,
'skip_event': True
}]
}
CONFIG_EVENTS = {
'init_config': {},
'instances': [{
'host': '127.0.0.1',
'port': 65530,
'timeout': 1.5,
'name': 'DownService',
'skip_event': False
}]
}
@attr(requires='tcp_check')
class TCPCheckTest(AgentCheckTest):
CHECK_NAME = 'tcp_check'
def tearDown(self):
self.check.stop()
def wait_for_async(self, method, attribute, count):
"""
Loop on `self.check.method` until `self.check.attribute >= count`.
        Raise an Exception after RESULTS_TIMEOUT seconds if the count is not reached.
"""
# Check the initial values to see if we already have results before waiting for the async
# instances to finish
initial_values = getattr(self, attribute)
i = 0
while i < RESULTS_TIMEOUT:
self.check._process_results()
if len(getattr(self.check, attribute)) + len(initial_values) >= count:
return getattr(self.check, method)() + initial_values
time.sleep(1.1)
i += 1
raise Exception("Didn't get the right count of service checks in time, {0}/{1} in {2}s: {3}"
.format(len(getattr(self.check, attribute)), count, i,
getattr(self.check, attribute)))
def test_event_deprecation(self):
"""
Deprecate events usage for service checks.
"""
# Run the check
self.run_check(CONFIG_EVENTS)
# Overrides self.service_checks attribute when values are available
self.warnings = self.wait_for_async('get_warnings', 'warnings', len(CONFIG_EVENTS['instances']))
# Assess warnings
self.assertWarning(
"Using events for service checks is deprecated in "
"favor of monitors and will be removed in future versions of the "
"Datadog Agent.",
count=len(CONFIG_EVENTS['instances'])
)
def test_check(self):
"""
Check coverage.
"""
# Run the check
self.run_check(CONFIG)
# Overrides self.service_checks attribute when values are available
self.service_checks = self.wait_for_async('get_service_checks', 'service_checks', len(CONFIG['instances']))
self.metrics = self.check.get_metrics()
expected_tags = ["instance:DownService", "target_host:127.0.0.1", "port:65530"]
self.assertServiceCheckCritical("tcp.can_connect", tags=expected_tags)
expected_tags = ["instance:DownService2", "target_host:126.0.0.1", "port:65530", "test1"]
self.assertServiceCheckCritical("tcp.can_connect", tags=expected_tags)
expected_tags = ["instance:UpService", "target_host:datadoghq.com", "port:80", "test2"]
self.assertServiceCheckOK("tcp.can_connect", tags=expected_tags)
expected_tags = ["instance:response_time", "target_host:datadoghq.com", "port:80", "test3"]
self.assertServiceCheckOK("tcp.can_connect", tags=expected_tags)
expected_tags = ["instance:response_time", "url:datadoghq.com:80", "test3"]
self.assertMetric("network.tcp.response_time", tags=expected_tags)
self.coverage_report()
|
dhstack/hurdatReader | refs/heads/master | hurdatReader/coordAvg.py | 1 | #!/usr/bin/env python3.2
'''Module for averaging GPS coordinates together based on spherical shape
of the Earth or summing the area found through integration.
Import and call methods.
@author: David Stack
'''
import math
__all__ = ['avgAll', 'avgMid', 'avgFirst', 'avgLast', 'calcScale',
'weightedAvgCoords', 'test']
def __mean(nums):
'''Calculates average of a list.'''
if len(nums):
return float( sum(nums) / len(nums))
else:
return 0.0
def __toCartesian(lat, lon):
lat = math.radians(lat)
lon = math.radians(lon)
x = math.cos(lat) * math.cos(lon)
y = math.cos(lat) * math.sin(lon)
z = math.sin(lat)
cartCoords = [x,y,z]
return cartCoords
def __avgCoords(latList, lonList):
'''Calculates cartesian coordinates from lat lon using geographic midpoint.
Returns lat/lon coords in degrees.'''
xList = []
yList = []
zList = []
for i in range(0, len(lonList)):
coords = __toCartesian(latList[i], lonList[i])
xList.append(coords[0])
yList.append(coords[1])
zList.append(coords[2])
x = __mean(xList)
y = __mean(yList)
z = __mean(zList)
lon = math.atan2(y, x)
hyp = math.sqrt(x * x + y * y)
lat = math.atan2(z, hyp)
lon = math.degrees(lon)
lat = math.degrees(lat)
coords = [lat, lon]
return coords
def __calcDif(aList):
'''Calculates difference between items in a list and returns a list of
differences.'''
difList = []
for item in aList:
if item == aList[0]:
prevItem = item
else:
currentItem = item
difList.append(math.fabs(currentItem - prevItem))
prevItem = currentItem
return difList
def calcScale(latList, lonList):
'''Calculates scale of storm based on number of measurements.'''
deltaLatList = __calcDif(latList)
deltaLonList = __calcDif(lonList)
scale = sum(deltaLatList) * sum(deltaLonList)
return scale
def avgAll(latList, lonList):
    '''Averages all points in list. Method 1.'''
average = __avgCoords(latList, lonList)
return average
def avgMid(latList, lonList, windList, numMeas):
'''Averages numMeas on either side of highest point in windList. Method 2.'''
maxWind = max(windList)
#print('Max Wind:',maxWind)
mid = windList.index(maxWind)
#print('Mid:', mid)
if maxWind == windList[0]:
#print('Special Case')
average = __avgCoords(latList, lonList)
elif mid-numMeas < 0:
#print('Special Case 2')
average = __avgCoords(latList[0:mid+numMeas],lonList[0:mid+numMeas])
else:
#print('Normal')
average = __avgCoords(latList[mid-numMeas:mid+numMeas],lonList[mid-numMeas:mid+numMeas])
return average
def avgFirst(latList, lonList, numMeas):
'''First day (first numMeas measurements) average. Method 3.'''
average = __avgCoords(latList[:numMeas], lonList[:numMeas])
return average
def avgLast(latList, lonList, numMeas):
'''Last day (last numMeas measurements) average. Method 4.'''
average = __avgCoords(latList[-numMeas:], lonList[-numMeas:])
return average
def __calcWeight(scale, scaleMax):
    '''Calculates weight of storm given a list of corresponding scales.'''
weight = scale / scaleMax
return weight
def weightedAvgCoords(latList, lonList, scale, scaleMax):
'''Determines weighted lat and lon by integrating over each
and weighting by the provided weight for i number of storms. Returns a list
of coordinates.'''
weight = __calcWeight(scale, scaleMax)
coords = __avgCoords(latList, lonList)
lat = coords[0] * weight
lon = coords[1] * weight
weightedAvg = [lat, lon]
return weightedAvg
def test():
'''Test function.'''
print('---Module coordAvg test---')
latList = [28.0, 28.0, 28.0, 28.1, 28.2, 28.3, 28.4, 28.6, 29.0, 29.5, 30.0,
30.5, 31.0]
lonList = [94.8, 95.4, 96.0, 96.5, 97.0, 97.6, 98.3, 98.9, 99.4, 99.8, 100.0,
100.1, 100.2]
windList = [80, 80, 80, 80, 70, 60, 60, 50, 50, 40, 40, 40, 40]
numMeas = 4
#
print('--Actual Values--')
print('All :', [28.904683, 97.985048])
print('Mid :', [])
print('First:', [28.026472, 95.674809])
print('Last :', [30.250083, 100.024174])
print('--Test Values--')
print('All :', avgAll(latList, lonList))
print('Mid :', avgMid(latList, lonList, windList, numMeas))
print('First:', avgFirst(latList, lonList, numMeas))
print('Last :', avgLast(latList, lonList, numMeas))
print('\n***__mean Test***')
meanList = [10, 20, 30]
calcMean = __mean(meanList)
if 20 != calcMean:
print('!!!---TEST FAIL---!!!')
print('Actual Mean:', 20)
print('Calc Mean:',calcMean)
else:
print('PASS')
print('\n***__toCartesian Test***')
lat = 28.0
lon = 94.8
cart = __toCartesian(lat, lon)
if [-0.07388315034589424, 0.8798509713754594, 0.4694715627858908] != cart:
print('!!!---TEST FAIL---!!!')
print('Actual Cart:', [-0.07388315034589424, 0.8798509713754594, 0.4694715627858908])
print('Calc Cart :', cart)
else:
print('PASS')
print('\n***weightedAvgCoords Test***')
latList = [25.6,26.8,28.1,29.3,30.2,31.3,32.9,35.1,37]
lonList = [-61.2,-63,-64.6,-65.9,-65.8,-64,-60.9,-57.2,-53.5]
scale = 194.94
scaleMax = 1591.04
test = weightedAvgCoords(latList, lonList, scale, scaleMax)
test = [round(test[0],1),round(test[1],1)]
known = [3.8,-7.6]
if known != test:
print('!!!---TEST FAIL---!!!')
print('Actual:', known)
print('Calc :', test)
else:
print('PASS')
print('\n***__calcDif Test***')
aList = [25.6,26.8,28.1,29.3,30.2,31.3,32.9,35.1,37]
test = __calcDif(aList)
for i in range(0,len(test)):
test[i] = round(test[i],4)
known = [1.2,1.3,1.2,0.9,1.1,1.6,2.2,1.9]
if known != test:
print('!!!---TEST FAIL---!!!')
print('Actual:', known)
print('Calc :', test)
else:
print('PASS')
print('\n***__calcWeight Test***')
scale = 194.94
scaleMax = 1591.04
test = round(__calcWeight(scale,scaleMax),6)
known = 0.122524
if known != test:
print('!!!---TEST FAIL---!!!')
print('Actual:', known)
print('Calc :', test)
else:
print('PASS')
print('\n***calcScale Test***')
latList = [25.6,26.8,28.1,29.3,30.2,31.3,32.9,35.1,37]
lonList = [-61.2,-63,-64.6,-65.9,-65.8,-64,-60.9,-57.2,-53.5]
test = round(calcScale(latList, lonList),2)
known = 194.94
if known != test:
print('!!!---TEST FAIL---!!!')
print('Actual:', known)
print('Calc :', test)
else:
print('PASS')
# Run test if module is run as a program
if __name__ == '__main__':
test()
|
losywee/rethinkdb | refs/heads/next | test/rql_test/connections/http_support/decorator/decorator.py | 112 | ########################## LICENCE ###############################
# Copyright (c) 2005-2012, Michele Simionato
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# Redistributions in bytecode form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
"""
Decorator module, see http://pypi.python.org/pypi/decorator
for the documentation.
"""
__version__ = '3.4.0'
__all__ = ["decorator", "FunctionMaker", "contextmanager"]
import sys, re, inspect
if sys.version >= '3':
from inspect import getfullargspec
def get_init(cls):
return cls.__init__
else:
class getfullargspec(object):
"A quick and dirty replacement for getfullargspec for Python 2.X"
def __init__(self, f):
self.args, self.varargs, self.varkw, self.defaults = \
inspect.getargspec(f)
self.kwonlyargs = []
self.kwonlydefaults = None
def __iter__(self):
yield self.args
yield self.varargs
yield self.varkw
yield self.defaults
def get_init(cls):
return cls.__init__.im_func
DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
# basic functionality
class FunctionMaker(object):
"""
An object with the ability to create functions with a given signature.
It has attributes name, doc, module, signature, defaults, dict and
methods update and make.
"""
def __init__(self, func=None, name=None, signature=None,
defaults=None, doc=None, module=None, funcdict=None):
self.shortsignature = signature
if func:
# func can be a class or a callable, but not an instance method
self.name = func.__name__
if self.name == '<lambda>': # small hack for lambda functions
self.name = '_lambda_'
self.doc = func.__doc__
self.module = func.__module__
if inspect.isfunction(func):
argspec = getfullargspec(func)
self.annotations = getattr(func, '__annotations__', {})
for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
'kwonlydefaults'):
setattr(self, a, getattr(argspec, a))
for i, arg in enumerate(self.args):
setattr(self, 'arg%d' % i, arg)
if sys.version < '3': # easy way
self.shortsignature = self.signature = \
inspect.formatargspec(
formatvalue=lambda val: "", *argspec)[1:-1]
else: # Python 3 way
allargs = list(self.args)
allshortargs = list(self.args)
if self.varargs:
allargs.append('*' + self.varargs)
allshortargs.append('*' + self.varargs)
elif self.kwonlyargs:
allargs.append('*') # single star syntax
for a in self.kwonlyargs:
allargs.append('%s=None' % a)
allshortargs.append('%s=%s' % (a, a))
if self.varkw:
allargs.append('**' + self.varkw)
allshortargs.append('**' + self.varkw)
self.signature = ', '.join(allargs)
self.shortsignature = ', '.join(allshortargs)
self.dict = func.__dict__.copy()
# func=None happens when decorating a caller
if name:
self.name = name
if signature is not None:
self.signature = signature
if defaults:
self.defaults = defaults
if doc:
self.doc = doc
if module:
self.module = module
if funcdict:
self.dict = funcdict
# check existence required attributes
assert hasattr(self, 'name')
if not hasattr(self, 'signature'):
raise TypeError('You are decorating a non function: %s' % func)
def update(self, func, **kw):
"Update the signature of func with the data in self"
func.__name__ = self.name
func.__doc__ = getattr(self, 'doc', None)
func.__dict__ = getattr(self, 'dict', {})
func.func_defaults = getattr(self, 'defaults', ())
func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
func.__annotations__ = getattr(self, 'annotations', None)
callermodule = sys._getframe(3).f_globals.get('__name__', '?')
func.__module__ = getattr(self, 'module', callermodule)
func.__dict__.update(kw)
def make(self, src_templ, evaldict=None, addsource=False, **attrs):
"Make a new function from a given template and update the signature"
src = src_templ % vars(self) # expand name and signature
evaldict = evaldict or {}
mo = DEF.match(src)
if mo is None:
raise SyntaxError('not a valid function template\n%s' % src)
name = mo.group(1) # extract the function name
names = set([name] + [arg.strip(' *') for arg in
self.shortsignature.split(',')])
for n in names:
if n in ('_func_', '_call_'):
raise NameError('%s is overridden in\n%s' % (n, src))
if not src.endswith('\n'): # add a newline just for safety
src += '\n' # this is needed in old versions of Python
try:
code = compile(src, '<string>', 'single')
# print >> sys.stderr, 'Compiling %s' % src
exec code in evaldict
except:
print >> sys.stderr, 'Error in generated code:'
print >> sys.stderr, src
raise
func = evaldict[name]
if addsource:
attrs['__source__'] = src
self.update(func, **attrs)
return func
@classmethod
def create(cls, obj, body, evaldict, defaults=None,
doc=None, module=None, addsource=True, **attrs):
"""
Create a function from the strings name, signature and body.
evaldict is the evaluation dictionary. If addsource is true an attribute
__source__ is added to the result. The attributes attrs are added,
if any.
"""
if isinstance(obj, str): # "name(signature)"
name, rest = obj.strip().split('(', 1)
signature = rest[:-1] #strip a right parens
func = None
else: # a function
name = None
signature = None
func = obj
self = cls(func, name, signature, defaults, doc, module)
ibody = '\n'.join(' ' + line for line in body.splitlines())
return self.make('def %(name)s(%(signature)s):\n' + ibody,
evaldict, addsource, **attrs)
def decorator(caller, func=None):
"""
decorator(caller) converts a caller function into a decorator;
decorator(caller, func) decorates a function using a caller.
"""
if func is not None: # returns a decorated function
evaldict = func.func_globals.copy()
evaldict['_call_'] = caller
evaldict['_func_'] = func
return FunctionMaker.create(
func, "return _call_(_func_, %(shortsignature)s)",
evaldict, undecorated=func, __wrapped__=func)
else: # returns a decorator
if inspect.isclass(caller):
name = caller.__name__.lower()
callerfunc = get_init(caller)
doc = 'decorator(%s) converts functions/generators into ' \
'factories of %s objects' % (caller.__name__, caller.__name__)
fun = getfullargspec(callerfunc).args[1] # second arg
elif inspect.isfunction(caller):
name = '_lambda_' if caller.__name__ == '<lambda>' \
else caller.__name__
callerfunc = caller
doc = caller.__doc__
fun = getfullargspec(callerfunc).args[0] # first arg
else: # assume caller is an object with a __call__ method
name = caller.__class__.__name__.lower()
callerfunc = caller.__call__.im_func
doc = caller.__call__.__doc__
fun = getfullargspec(callerfunc).args[1] # second arg
evaldict = callerfunc.func_globals.copy()
evaldict['_call_'] = caller
evaldict['decorator'] = decorator
return FunctionMaker.create(
'%s(%s)' % (name, fun),
'return decorator(_call_, %s)' % fun,
evaldict, undecorated=caller, __wrapped__=caller,
doc=doc, module=caller.__module__)
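# Example usage (the canonical ``trace`` example from the decorator docs,
# reproduced here as a sketch):
#
#     @decorator
#     def trace(f, *args, **kw):
#         print "calling %s with args %s, %s" % (f.__name__, args, kw)
#         return f(*args, **kw)
#
#     @trace
#     def f1(x):
#         pass
#
# Unlike a plain functools.wraps wrapper, f1 keeps its exact signature, so
# inspect.getargspec(f1) still reports args=['x'].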
######################### contextmanager ########################
def __call__(self, func):
'Context manager decorator'
return FunctionMaker.create(
func, "with _self_: return _func_(%(shortsignature)s)",
dict(_self_=self, _func_=func), __wrapped__=func)
try: # Python >= 3.2
from contextlib import _GeneratorContextManager
ContextManager = type(
'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))
except ImportError: # Python >= 2.5
from contextlib import GeneratorContextManager
def __init__(self, f, *a, **k):
return GeneratorContextManager.__init__(self, f(*a, **k))
ContextManager = type(
'ContextManager', (GeneratorContextManager,),
dict(__call__=__call__, __init__=__init__))
contextmanager = decorator(ContextManager)
|
pepeportela/edx-platform | refs/heads/master | openedx/core/djangoapps/catalog/tests/mixins.py | 25 | """Mixins to help test catalog integration."""
from openedx.core.djangoapps.catalog.models import CatalogIntegration
class CatalogIntegrationMixin(object):
"""Utility for working with the catalog service during testing."""
catalog_integration_defaults = {
'enabled': True,
'internal_api_url': 'https://catalog-internal.example.com/api/v1/',
'cache_ttl': 0,
'service_username': 'lms_catalog_service_user',
'page_size': 20,
}
def create_catalog_integration(self, **kwargs):
"""
Creates a new CatalogIntegration with catalog_integration_defaults,
updated with any provided overrides.
"""
fields = dict(self.catalog_integration_defaults, **kwargs)
CatalogIntegration(**fields).save()
return CatalogIntegration.current()
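# Illustrative usage inside a test case (the test class below is hypothetical):
#
#     class CatalogApiTest(CatalogIntegrationMixin, TestCase):
#         def test_enabled(self):
#             catalog_integration = self.create_catalog_integration(cache_ttl=60)
#             self.assertTrue(catalog_integration.enabled)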
|
lol/BCI-BO-old | refs/heads/master | BCI_Framework/RandomForest_BCI.py | 1 | from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
import numpy as np
import pickle
import sys
from Learner import Learner
from sklearn.preprocessing import StandardScaler
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import pairwise, zero_one_loss, mean_squared_error
from sklearn.cross_validation import StratifiedKFold, cross_val_score
from sklearn.utils import shuffle
from scipy.stats import mode
import json
from sklearn.metrics import precision_recall_fscore_support
class RandomForest(Learner):
"""applying random forest to BCI dataset"""
def __init__(self, config, method='classification'):
""" """
Learner.__init__(self, config, method)
def generate_param_grid(self, feature_param_list, learner_name):
RF_size = self.config.configuration["n_trees"]
max_features_range = ["sqrt", "log2", None, 1]
max_depth_range = [None, 15, 30, 50]
min_samples_leaf_range = np.array([2, 5, 10])
if feature_param_list is None:
scores = np.zeros(shape=(len(max_features_range)*len(max_depth_range)*
len(min_samples_leaf_range), self.config.configuration["number_of_cvs_dict"][learner_name], RF_size))
param_grid = [ (m_feat, m_dep, m_sam_leaf) for m_feat in max_features_range for m_dep in max_depth_range
for m_sam_leaf in min_samples_leaf_range]
self.grid_dictionary = {'max_features':0, 'max_depth':1, 'min_samples_leaf':2}
else:
scores = np.zeros(shape=(len(max_features_range)*len(max_depth_range)* len(min_samples_leaf_range) * len(feature_param_list),
self.config.configuration["number_of_cvs_dict"][learner_name], RF_size))
param_grid = [ (m_feat, m_dep, m_sam_leaf, feat_param) for m_feat in max_features_range for m_dep in max_depth_range
for m_sam_leaf in min_samples_leaf_range for feat_param in feature_param_list]
self.grid_dictionary = {'max_features':0, 'max_depth':1, 'min_samples_leaf':2, 'fe_params':3}
return param_grid, scores
def set_params_list( self, learner_params, i):
RF_size = self.config.configuration["n_trees"]
n_jobs = self.config.configuration["n_jobs"]
m_feat = learner_params[0]
m_dep = learner_params[1]
m_sam_leaf = learner_params[2]
if self.method == 'classification':
self.learner = RandomForestClassifier(n_estimators = RF_size,
oob_score = True, n_jobs= n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state= i)
elif self.method == 'regression':
self.learner = RandomForestRegressor(n_estimators = RF_size,
oob_score = True, n_jobs= n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state= i)
def set_params_dict( self, learner_params):
if 'n_trees' in learner_params.keys():
RF_size = int(learner_params["n_trees"])
else:
RF_size = self.config.configuration["n_trees"]
n_jobs = self.config.configuration["n_jobs"]
m_feat = learner_params['max_features']
m_dep = learner_params['max_depth']
m_sam_leaf = learner_params['min_samples_leaf']
if self.method == 'classification':
self.learner = RandomForestClassifier(n_estimators = RF_size,
oob_score = True, n_jobs= n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state= 0)
elif self.method == 'regression':
self.learner = RandomForestRegressor(n_estimators = RF_size,
oob_score = True, n_jobs= n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state= 0)
def fit_calc_cv_scores(self, X_train, y_train, X_test, y_test):
self.learner.fit(X_train, y_train)
return self.predict_forall_estimators(X_test, y_test)
def predict_forall_estimators(self, X_test, Y_test):
n_estimators = len(self.learner.estimators_)
errors, precisions, recalls = np.zeros(shape = (n_estimators)), np.zeros(shape = (n_estimators)), np.zeros(shape = (n_estimators))
errors[:] = np.NaN
predictions = np.empty(shape = (X_test.shape[0], n_estimators))
predictions[:] = np.NAN
for i in range(0,n_estimators):
predictions[:,i] = self.learner.estimators_[i].predict(X_test) + 1
most_common = mode(predictions[:,0:i+1], axis = 1)
preds_uptonow = [item for sublist in most_common[0] for item in sublist]
errors[i] = np.sum((preds_uptonow != Y_test))/float(len(Y_test))
precisions[i], recalls[i], _ , _ = precision_recall_fscore_support(Y_test, preds_uptonow, average='weighted')
# print predictions
return errors, precisions, recalls
def write_cv_results_toFile(self, scores, precision_scores, recall_scores, param_grid, result_path):
final_results_dict = {}
all_scores_dict = {'error' : scores, 'precision' : precision_scores, 'recall' : recall_scores}
for score in all_scores_dict:
avg_score = np.mean(all_scores_dict[score], axis=1)
std_score = np.std(all_scores_dict[score], axis=1)
if score == 'error':
opt_ind = np.unravel_index(np.argmin(avg_score), avg_score.shape)
else:
opt_ind = np.unravel_index(np.argmax(avg_score), avg_score.shape)
final_results_dict[score] = avg_score[opt_ind]
final_results_dict[score + '_std'] = std_score[opt_ind]
params_dict = {}
for element in self.grid_dictionary:
params_dict[element] = param_grid[opt_ind[0]][self.grid_dictionary[element]]
final_results_dict[score + '_params'] = params_dict
n_estimators = opt_ind[1] + 1
final_results_dict[score + '_params'].update(dict(n_trees = str(n_estimators)))
if not 'fe_params' in params_dict:
final_results_dict[score + '_params'].update(dict(fe_params = None))
final_results_dict['channel_type'] = self.config.channel_type
json.dump(final_results_dict, open(result_path,'w'))
def train_learner(self, X, Y, X_test = [], Y_test = [], learner_params = [] ,optimal = False):
""" """
if optimal:
self.train_learner_opt(X, Y, X_test, Y_test, learner_params)
else:
self.train_learner_cv(X, Y)
def train_learner_cv(self, X, Y, optimal = False):
assert self.result_path != ''
X = np.asarray( X, dtype=np.float32, order='F')
Y = np.asarray( Y, dtype=np.short, order='F')
scaler = StandardScaler()
X = scaler.fit_transform(X)
for i in range(self.config.configuration["number_of_cvs"]):
for param_ind in range(len(self.scores)):
print self.param_grid[param_ind]
m_feat = self.param_grid[param_ind][0]
m_dep = self.param_grid[param_ind][1]
# m_dens = self.param_grid[param_ind][2]
m_sam_leaf = self.param_grid[param_ind][2]
if self.method == 'classification':
self.learner = RandomForestClassifier(n_estimators = self.RF_size,
oob_score=True, n_jobs= self.n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state=i)
elif self.method == 'regression':
self.learner = RandomForestRegressor(n_estimators = self.RF_size,
oob_score=True, n_jobs= self.n_jobs,
max_depth = m_dep, max_features = m_feat,
min_samples_leaf = m_sam_leaf, random_state=i)
# TODO: check regression error
# self.learner.fit(X, Y)
# oob_err = self.learner.oob_score_
# crossval_error = 1 - cross_val_score(self.learner, X, Y, cv=5).mean()
X_new, Y_new = shuffle(X, Y, random_state=i)
cv = StratifiedKFold(y=Y_new, n_folds=self.config.configuration["number_of_cv_folds"])
cv_errors_sum = np.zeros(self.RF_size)
for train_index, test_index in cv:
# print("TRAIN:", train_index, "TEST:", test_index)
X_train, X_test = X[train_index], X[test_index]
y_train, y_test = Y[train_index], Y[test_index]
self.learner.fit(X_train, y_train)
test_predictions = self.learner.predict(X_test)
cv_errors_sum += self.predict_forall_estimators(X_test, y_test)
crossval_error = cv_errors_sum/self.config.configuration["number_of_cv_folds"]
print 'error = ', crossval_error
# bb = self.__calc_OOB_inbag_scores( X, Y, self.learner)
# self.scores[param_ind, i] = oob_err
self.scores[param_ind, i, :] = crossval_error
avg_errs = np.mean(self.scores, axis=1)
min_ind = np.unravel_index(np.argmin(avg_errs), avg_errs.shape)
with open(self.result_path, 'w') as res_file:
print>>res_file, np.min(np.mean(self.scores, axis=1))
print>>res_file, dict(max_features = self.param_grid[min_ind[0]][0], max_depth = self.param_grid[min_ind[0]][1],
min_samples_leaf = self.param_grid[min_ind[0]][2], n_trees = min_ind[1])
print>>res_file, np.std(self.scores, axis=1)
print>>res_file, self.scores
def train_learner_opt(self, X, Y, X_test, Y_test, learner_params):
""" """
# Y_test = np.array(Y_test)
# self.logging.info('Standardizing data!')
# scaler = StandardScaler()
# X = scaler.fit_transform(X)
# X_test = scaler.transform(X_test)
# self.logging.info('X size is: %s and Y size is: %s and X_test size is: %s and Y_test size is: %s',
# '_'.join(map(str,X.shape)), str(len(Y)), '_'.join(map(str,X_test.shape)), str(len(Y_test)))
X, Y, X_test, Y_test = self.scale_all_data(X, Y, X_test, Y_test)
clf = self.learner_opt(n_estimators = learner_params["n_trees"], oob_score=True, n_jobs= self.n_jobs,
compute_importances = True, max_features = learner_params["max_features"],
max_depth = learner_params["max_depth"], min_samples_leaf = learner_params["min_samples_leaf"])
clf.fit(X, Y)
self.fit_opt_learner(X, Y, X_test, Y_test, clf)
# clf.fit(X, Y)
# Y_pred_train = clf.predict(X)
# self.logging.info('optimal Random Forest trained with penalty type: %s and parameters = %s!', self.my_loss.__class__, learner_params)
# self.logging.info('calculating inbag and oob scores!')
#
## oob_predictions, inbag_predictions, oob_scores, inbag_scores = self.__calc_OOB_inbag_scores(X, Y, clf)
#
# Y_pred = clf.predict(X_test)
# nonnan_indices = ~np.isnan(Y_test)
# error = self.my_loss(Y_test[nonnan_indices], Y_pred[nonnan_indices])
# self.logging.info('error is %s', str(error))
#
# with open(self.result_opt_path,'w') as res_file:
# res_file.write(str(error))
# res_file.write('\n')
# res_file.write(' '.join(map(str,Y_pred) + ['\n']))
# res_file.write(' '.join(map(str,Y_pred_train)))
# res_file.write('\n')
# res_file.write(str(learner_params))
# res_file.write('\n')
# res_file.write(str(oob_scores))
# res_file.write('\n')
# res_file.write(str(oob_scores))
# res_file.write('\n')
def __calc_OOB_inbag_scores(self, X, Y, clf):
class_labels = list(set(Y))
num_of_classes = len(class_labels)
oob_predictions = np.zeros([num_of_classes, len(Y)])
inbag_predictions = np.zeros([num_of_classes, len(Y)])
oob_scores = np.zeros(self.RF_size)
inbag_scores = np.zeros(self.RF_size)
indices = np.array(range(len(Y)))
for index in range(self.RF_size):
d3 = clf.estimators_[index]
current_tree_predictions = d3.predict(X)
oob_predictions = self.__calc_current_tree_prediction(oob_predictions, current_tree_predictions,
~d3.indices_, indices, class_labels) ## OOB scores
inbag_predictions = self.__calc_current_tree_prediction(inbag_predictions, current_tree_predictions,
d3.indices_, indices, class_labels) ## inbag scores
oob_scores[index] = self.__calc_scores(oob_predictions,Y) ## calc oob scores
inbag_scores[index] = self.__calc_scores(inbag_predictions,Y) ## calc oob scores
return oob_predictions, inbag_predictions, oob_scores, inbag_scores
def __calc_scores(self, given_predictions, Y):
class_labels = list(set(Y))
all_predictions = np.argmax(given_predictions,0)
if len(class_labels) == 3:
all_predictions -= 1 # TODO: why is it like this?
elif len(class_labels) > 3:
sys.exit('Error! __calc_scores forest')
number_of_matches = (all_predictions == Y).tolist().count(True)
return float(number_of_matches)/len(Y)
def __calc_current_tree_prediction(self, sample_predictions, current_tree_predictions, given_indices, all_indices, class_labels):
current_samples_predictions = current_tree_predictions[given_indices]
current_samples_indices = all_indices[given_indices]
for index in range(len(class_labels)):
label = class_labels[index]
sample_predictions[index,current_samples_indices[current_samples_predictions == label]] += 1
return sample_predictions
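# Typical flow (sketch; ``config`` and the train/test arrays are assumed to
# come from the surrounding BCI framework):
#
#     rf = RandomForest(config, method='classification')
#     rf.set_params_dict({'max_features': 'sqrt', 'max_depth': None,
#                         'min_samples_leaf': 5})
#     errors, precisions, recalls = rf.fit_calc_cv_scores(X_tr, y_tr,
#                                                         X_te, y_te)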
|
pyzos/pyzos | refs/heads/master | pyzos/zos_obj_override/ilderow_methods.py | 1 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: ilderow_methods.py
# Purpose: store custom methods for wrapper class of ILDERow Interface
# Licence: MIT License
#-------------------------------------------------------------------------------
"""Store custom methods for wrapper class of ILDERow Interface, which contains
all data for a LDE surface. This interface can be accessed via the
ILensDataEditor interface.
name := repr(zos_obj).split()[0].split('.')[-1].lower() + '_methods.py'
"""
from __future__ import print_function
from __future__ import division
from win32com.client import CastTo as _CastTo, constants as _constants
from pyzos.zosutils import wrapped_zos_object as _wrapped_zos_object
|
marios-zindilis/musicbrainz-django-models | refs/heads/master | musicbrainz_django_models/tests/test_model_meta.py | 1 | """
Tests for models that subclass `abstract__model_meta`.
"""
from django.test import TestCase
from django.core.exceptions import ValidationError
from ..models import artist_meta
from ..models import event_meta
from ..models import label_meta
from ..models import recording_meta
from ..models import work_meta
class test_model_meta_mixin(object):
def test__model_meta__rating_min_value(self):
self.subject.rating = self.subject.RATING_MIN - 1
with self.assertRaises(ValidationError):
self.subject.save()
def test__model_meta__rating_max_value(self):
self.subject.rating = self.subject.RATING_MAX + 1
with self.assertRaises(ValidationError):
self.subject.save()
class test_artist_meta(TestCase, test_model_meta_mixin):
"""
Tests for the `artist_meta` model.
"""
def setUp(self):
self.subject = artist_meta()
def test__artist_meta__instance(self):
self.assertIsInstance(self.subject, artist_meta)
def test__artist_meta__str(self):
self.assertEqual(str(self.subject), 'Artist Meta')
class test_event_meta(TestCase, test_model_meta_mixin):
"""
Tests for the `event_meta` model.
"""
def setUp(self):
self.subject = event_meta()
def test__event_meta__instance(self):
self.assertIsInstance(self.subject, event_meta)
def test__event_meta__str(self):
self.assertEqual(str(self.subject), 'Event Meta')
class test_label_meta(TestCase, test_model_meta_mixin):
"""
Tests for the `label_meta` model.
"""
def setUp(self):
self.subject = label_meta()
def test__label_meta__instance(self):
self.assertIsInstance(self.subject, label_meta)
def test__label_meta__str(self):
self.assertEqual(str(self.subject), 'Label Meta')
class test_recording_meta(TestCase, test_model_meta_mixin):
"""
Tests for the `recording_meta` model.
"""
def setUp(self):
self.subject = recording_meta()
def test__recording_meta__instance(self):
self.assertIsInstance(self.subject, recording_meta)
def test__recording_meta__str(self):
self.assertEqual(str(self.subject), 'Recording Meta')
class test_work_meta(TestCase, test_model_meta_mixin):
"""
Tests for the `work_meta` model.
"""
def setUp(self):
self.subject = work_meta()
def test__work_meta__instance(self):
self.assertIsInstance(self.subject, work_meta)
def test__work_meta__str(self):
self.assertEqual(str(self.subject), 'Work Meta')
|
LinDA-tools/TransformationTool | refs/heads/master | config/urls.py | 1 | from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^transformation/', include('transformation.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
seksan2538/schedule-generator | refs/heads/master | xlwt/examples/col_width.py | 25 | #!/usr/bin/env python
# -*- coding: windows-1251 -*-
# Copyright (C) 2005 Kiseliov Roman
__rev_id__ = """$Id$"""
from xlwt import *
w = Workbook()
ws = w.add_sheet('Hey, Dude')
for i in range(6, 80):
fnt = Font()
fnt.height = i*20
style = XFStyle()
style.font = fnt
    ws.write(1, i, 'Test', style)  # apply the style so the font height varies
ws.col(i).width = 0x0d00 + i
w.save('col_width.xls')
|
kenshay/ImageScript | refs/heads/master | ProgramData/SystemFiles/Python/Lib/site-packages/PyQt4/examples/widgets/tooltips/tooltips_rc2.py | 5 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Mar 20 13:52:27 2013
# by: The Resource Compiler for PyQt (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x00\xaa\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x71\x49\x44\x41\x54\x58\xc3\xed\xce\x4b\x0a\x80\x30\
\x10\x04\xd1\x1c\xd3\x23\x7a\xcb\x11\x82\xb8\x50\x62\x92\xf9\xd5\
\x66\x1a\x7a\x5d\xaf\xb5\x5a\xcd\x36\xb9\xcf\xc4\x8f\x53\xfa\x09\
\xc4\x13\xa7\x10\x28\xe0\x13\xcf\x44\x0c\xe3\x59\x08\x14\x30\x8d\
\x47\x23\x50\xc0\x72\x3c\x02\xb1\x1d\xf7\x46\xa0\x00\x75\xdc\x03\
\x61\x8e\x5b\x11\x28\xc0\x2d\xae\x45\xa0\x00\xf7\xf8\x0e\x22\x2c\
\xbe\x8a\x40\x01\xe1\xf1\x3f\x44\x5a\x7c\x84\x40\x01\xe9\xf1\x37\
\x42\xe0\xd7\xd8\x5d\x0f\x6f\x97\x11\x88\x38\xa9\x1e\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\x5e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x01\x03\x00\x00\x00\x49\xb4\xe8\xb7\
\x00\x00\x00\x06\x50\x4c\x54\x45\x00\x00\x00\x58\xa8\xff\x8c\x14\
\x1f\xab\x00\x00\x00\x13\x49\x44\x41\x54\x08\xd7\x63\x60\x00\x81\
\xfa\xff\xff\xff\x0d\x3e\x02\x04\x00\x8d\x4d\x68\x6b\xcf\xb8\x8e\
\x86\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x6c\x49\x44\x41\x54\x58\xc3\xed\xd7\x5b\x0e\x00\x10\
\x0c\x44\x51\xcb\xb4\x44\xbb\x64\x03\x1e\xd5\x18\x1d\x31\x12\xdf\
\xf7\x7c\xd1\xa6\xf4\xe8\xa9\x93\x8b\x8f\xe6\x52\x87\x17\x81\x59\
\x46\x0d\x18\x7f\xdc\x13\x1e\x40\x62\xe2\x5e\xc4\xd1\xf8\x2e\x02\
\x12\xb7\x22\xa0\x71\x0b\x22\x14\x70\x25\x3e\x43\xfc\x0d\xb8\x1a\
\xef\x21\x04\x10\x40\x00\x3d\x44\x14\x00\x7d\xc7\x14\x13\x11\xc5\
\x4c\x48\x31\x15\x53\xec\x05\x14\x9b\x11\xc5\x6e\x08\xdd\x8e\x1b\
\x14\x54\x19\xf3\xa1\x23\xdb\xd5\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x0c\
\x05\x59\xa7\xc7\
\x00\x74\
\x00\x72\x00\x69\x00\x61\x00\x6e\x00\x67\x00\x6c\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x08\x8b\x06\x27\
\x00\x73\
\x00\x71\x00\x75\x00\x61\x00\x72\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0a\
\x0a\x2d\x16\x47\
\x00\x63\
\x00\x69\x00\x72\x00\x63\x00\x6c\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x30\x00\x00\x00\x00\x00\x01\x00\x00\x00\xae\
\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x01\x10\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
|
bschuon/django-oscar | refs/heads/master | src/oscar/apps/promotions/migrations/0002_auto_20150604_1450.py | 50 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('promotions', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='handpickedproductlist',
name='products',
field=models.ManyToManyField(to='catalogue.Product', verbose_name='Products', through='promotions.OrderedProduct', blank=True),
),
migrations.AlterField(
model_name='multiimage',
name='images',
field=models.ManyToManyField(help_text='Choose the Image content blocks that this block will use. (You may need to create some first).', to='promotions.Image', blank=True),
),
]
|
bzero/bitex | refs/heads/master | libs/characters/characters/charset.py | 11 | # -*- coding: utf-8 -*-
"""
Characters
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
import re
import string
def int_to_charset(val, charset):
""" Turn a non-negative integer into a string.
"""
if not val >= 0:
raise ValueError('"val" must be a non-negative integer.')
if val == 0: return charset[0]
output = ""
while val > 0:
val, digit = divmod(val, len(charset))
output += charset[digit]
# reverse the characters in the output and return
return output[::-1]
def charset_to_int(s, charset):
""" Turn a string into a non-negative integer.
"""
output = 0
for char in s:
output = output * len(charset) + charset.index(char)
return output
def change_charset(s, original_charset, target_charset):
""" Convert a string from one charset to another.
"""
if not isinstance(s, str):
raise ValueError('"s" must be a string.')
intermediate_integer = charset_to_int(s, original_charset)
output_string = int_to_charset(intermediate_integer, target_charset)
return output_string
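if __name__ == '__main__':
    # Minimal sketch: a charset is just an ordered string of symbols, so a
    # round trip between hexadecimal and binary charsets looks like this.
    hex_chars = '0123456789abcdef'
    bin_chars = '01'
    assert charset_to_int(int_to_charset(255, hex_chars), hex_chars) == 255
    print(change_charset('ff', hex_chars, bin_chars))  # prints '11111111'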
|
cyberden/CouchPotatoServer | refs/heads/develop | libs/oauthlib/oauth1/rfc5849/__init__.py | 112 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
import logging
import urlparse
from oauthlib.common import Request, urlencode
from . import parameters, signature, utils
SIGNATURE_HMAC = u"HMAC-SHA1"
SIGNATURE_RSA = u"RSA-SHA1"
SIGNATURE_PLAINTEXT = u"PLAINTEXT"
SIGNATURE_METHODS = (SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_PLAINTEXT)
SIGNATURE_TYPE_AUTH_HEADER = u'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = u'QUERY'
SIGNATURE_TYPE_BODY = u'BODY'
CONTENT_TYPE_FORM_URLENCODED = u'application/x-www-form-urlencoded'
class Client(object):
"""A client used to sign OAuth 1.0 RFC 5849 requests"""
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None, verifier=None):
self.client_key = client_key
self.client_secret = client_secret
self.resource_owner_key = resource_owner_key
self.resource_owner_secret = resource_owner_secret
self.signature_method = signature_method
self.signature_type = signature_type
self.callback_uri = callback_uri
self.rsa_key = rsa_key
self.verifier = verifier
if self.signature_method == SIGNATURE_RSA and self.rsa_key is None:
raise ValueError('rsa_key is required when using RSA signature method.')
def get_oauth_signature(self, request):
"""Get an OAuth signature to be used in signing a request
"""
if self.signature_method == SIGNATURE_PLAINTEXT:
# fast-path
return signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
uri, headers, body = self._render(request)
collected_params = signature.collect_parameters(
uri_query=urlparse.urlparse(uri).query,
body=body,
headers=headers)
logging.debug("Collected params: {0}".format(collected_params))
normalized_params = signature.normalize_parameters(collected_params)
normalized_uri = signature.normalize_base_string_uri(request.uri)
logging.debug("Normalized params: {0}".format(normalized_params))
logging.debug("Normalized URI: {0}".format(normalized_uri))
base_string = signature.construct_base_string(request.http_method,
normalized_uri, normalized_params)
logging.debug("Base signing string: {0}".format(base_string))
if self.signature_method == SIGNATURE_HMAC:
sig = signature.sign_hmac_sha1(base_string, self.client_secret,
self.resource_owner_secret)
elif self.signature_method == SIGNATURE_RSA:
sig = signature.sign_rsa_sha1(base_string, self.rsa_key)
else:
sig = signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
logging.debug("Signature: {0}".format(sig))
return sig
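    # Illustrative shape of the base string assembled above (values made
    # up; see RFC 5849 section 3.4.1): the uppercase HTTP method, the
    # normalized base URI and the normalized parameter string, each
    # percent-encoded and joined with '&', e.g.
    #
    #   POST&https%3A%2F%2Fexample.com%2Frequest&oauth_consumer_key%3Dkey%26...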
def get_oauth_params(self):
"""Get the basic OAuth parameters to be used in generating a signature.
"""
params = [
(u'oauth_nonce', utils.generate_nonce()),
(u'oauth_timestamp', utils.generate_timestamp()),
(u'oauth_version', u'1.0'),
(u'oauth_signature_method', self.signature_method),
(u'oauth_consumer_key', self.client_key),
]
if self.resource_owner_key:
params.append((u'oauth_token', self.resource_owner_key))
if self.callback_uri:
params.append((u'oauth_callback', self.callback_uri))
if self.verifier:
params.append((u'oauth_verifier', self.verifier))
return params
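    # Shape of the returned parameter list (illustrative values; nonce and
    # timestamp are generated fresh on every call):
    #
    #   [(u'oauth_nonce', u'7f3...'),
    #    (u'oauth_timestamp', u'1356998400'),
    #    (u'oauth_version', u'1.0'),
    #    (u'oauth_signature_method', u'HMAC-SHA1'),
    #    (u'oauth_consumer_key', u'my-client-key'),
    #    (u'oauth_token', u'my-token')]   # only if resource_owner_key is set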
def _render(self, request, formencode=False):
"""Render a signed request according to signature type
Returns a 3-tuple containing the request URI, headers, and body.
If the formencode argument is True and the body contains parameters, it
is escaped and returned as a valid formencoded string.
"""
# TODO what if there are body params on a header-type auth?
# TODO what if there are query params on a body-type auth?
uri, headers, body = request.uri, request.headers, request.body
# TODO: right now these prepare_* methods are very narrow in scope--they
# only affect their little thing. In some cases (for example, with
# header auth) it might be advantageous to allow these methods to touch
# other parts of the request, like the headers—so the prepare_headers
# method could also set the Content-Type header to x-www-form-urlencoded
# like the spec requires. This would be a fundamental change though, and
# I'm not sure how I feel about it.
if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
headers = parameters.prepare_headers(request.oauth_params, request.headers)
elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
body = parameters.prepare_form_encoded_body(request.oauth_params, request.decoded_body)
if formencode:
body = urlencode(body)
headers['Content-Type'] = u'application/x-www-form-urlencoded'
elif self.signature_type == SIGNATURE_TYPE_QUERY:
uri = parameters.prepare_request_uri_query(request.oauth_params, request.uri)
else:
raise ValueError('Unknown signature type specified.')
return uri, headers, body
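    # Summarizing the branches above: AUTH_HEADER places the oauth_*
    # parameters in an Authorization header, BODY merges them into the
    # form-encoded request body, and QUERY appends them to the request
    # URI's query string.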
def sign(self, uri, http_method=u'GET', body=None, headers=None):
"""Sign a request
Signs an HTTP request with the specified parts.
Returns a 3-tuple of the signed request's URI, headers, and body.
Note that http_method is not returned as it is unaffected by the OAuth
signing process.
The body argument may be a dict, a list of 2-tuples, or a formencoded
string. The Content-Type header must be 'application/x-www-form-urlencoded'
if it is present.
If the body argument is not one of the above, it will be returned
verbatim as it is unaffected by the OAuth signing process. Attempting to
sign a request with non-formencoded data using the OAuth body signature
type is invalid and will raise an exception.
If the body does contain parameters, it will be returned as a properly-
formatted formencoded string.
All string data MUST be unicode. This includes strings inside body
dicts, for example.
"""
# normalize request data
request = Request(uri, http_method, body, headers)
# sanity check
content_type = request.headers.get('Content-Type', None)
multipart = content_type and content_type.startswith('multipart/')
should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
has_params = request.decoded_body is not None
# 3.4.1.3.1. Parameter Sources
# [Parameters are collected from the HTTP request entity-body, but only
# if [...]:
# * The entity-body is single-part.
if multipart and has_params:
raise ValueError("Headers indicate a multipart body but body contains parameters.")
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
elif should_have_params and not has_params:
raise ValueError("Headers indicate a formencoded body but body was not decodable.")
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
elif not should_have_params and has_params:
raise ValueError("Body contains parameters but Content-Type header was not set.")
# 3.5.2. Form-Encoded Body
# Protocol parameters can be transmitted in the HTTP request entity-
# body, but only if the following REQUIRED conditions are met:
# o The entity-body is single-part.
# o The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
# o The HTTP request entity-header includes the "Content-Type" header
# field set to "application/x-www-form-urlencoded".
elif self.signature_type == SIGNATURE_TYPE_BODY and not (
should_have_params and has_params and not multipart):
raise ValueError('Body signatures may only be used with form-urlencoded content')
# generate the basic OAuth parameters
request.oauth_params = self.get_oauth_params()
# generate the signature
request.oauth_params.append((u'oauth_signature', self.get_oauth_signature(request)))
# render the signed request and return it
return self._render(request, formencode=True)
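# A minimal usage sketch for Client.sign (hypothetical credentials; the
# HMAC-SHA1 / Authorization-header defaults configured above are assumed):
#
#     client = Client(u'my-client-key', client_secret=u'my-client-secret')
#     uri, headers, body = client.sign(u'https://api.example.com/resource')
#     # headers now includes an Authorization header carrying the oauth_*
#     # parameters and the computed oauth_signature.
#
# For a form-encoded POST, supply the parameters together with the matching
# Content-Type so the sanity checks in sign() pass:
#
#     uri, headers, body = client.sign(
#         u'https://api.example.com/resource',
#         http_method=u'POST',
#         body=[(u'status', u'hello')],
#         headers={u'Content-Type': CONTENT_TYPE_FORM_URLENCODED})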
class Server(object):
"""A server used to verify OAuth 1.0 RFC 5849 requests"""
def __init__(self, signature_method=SIGNATURE_HMAC, rsa_key=None):
self.signature_method = signature_method
self.rsa_key = rsa_key
def get_client_secret(self, client_key):
raise NotImplementedError("Subclasses must implement this function.")
def get_resource_owner_secret(self, resource_owner_key):
raise NotImplementedError("Subclasses must implement this function.")
def get_signature_type_and_params(self, uri_query, headers, body):
signature_types_with_oauth_params = filter(lambda s: s[1], (
(SIGNATURE_TYPE_AUTH_HEADER, utils.filter_oauth_params(
signature.collect_parameters(headers=headers,
exclude_oauth_signature=False))),
(SIGNATURE_TYPE_BODY, utils.filter_oauth_params(
signature.collect_parameters(body=body,
exclude_oauth_signature=False))),
(SIGNATURE_TYPE_QUERY, utils.filter_oauth_params(
signature.collect_parameters(uri_query=uri_query,
exclude_oauth_signature=False))),
))
if len(signature_types_with_oauth_params) > 1:
raise ValueError('oauth_ params must come from only 1 signature type but were found in %s' % ', '.join(
[s[0] for s in signature_types_with_oauth_params]))
try:
signature_type, params = signature_types_with_oauth_params[0]
except IndexError:
raise ValueError('oauth_ params are missing. Could not determine signature type.')
return signature_type, dict(params)
def check_client_key(self, client_key):
raise NotImplementedError("Subclasses must implement this function.")
def check_resource_owner_key(self, client_key, resource_owner_key):
raise NotImplementedError("Subclasses must implement this function.")
def check_timestamp_and_nonce(self, timestamp, nonce):
raise NotImplementedError("Subclasses must implement this function.")
def check_request_signature(self, uri, http_method=u'GET', body='',
headers=None):
"""Check a request's supplied signature to make sure the request is
valid.
Servers should return HTTP status 400 if a ValueError exception
is raised and HTTP status 401 on return value False.
Per `section 3.2`_ of the spec.
.. _`section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
"""
headers = headers or {}
signature_type = None
# FIXME: urlparse does not return unicode!
uri_query = urlparse.urlparse(uri).query
signature_type, params = self.get_signature_type_and_params(uri_query,
headers, body)
# the parameters may not include duplicate oauth entries
filtered_params = utils.filter_oauth_params(params)
if len(filtered_params) != len(params):
raise ValueError("Duplicate OAuth entries.")
params = dict(params)
request_signature = params.get(u'oauth_signature')
client_key = params.get(u'oauth_consumer_key')
resource_owner_key = params.get(u'oauth_token')
nonce = params.get(u'oauth_nonce')
timestamp = params.get(u'oauth_timestamp')
callback_uri = params.get(u'oauth_callback')
verifier = params.get(u'oauth_verifier')
signature_method = params.get(u'oauth_signature_method')
# ensure all mandatory parameters are present
if not all((request_signature, client_key, nonce,
timestamp, signature_method)):
raise ValueError("Missing OAuth parameters.")
# if version is supplied, it must be "1.0"
if u'oauth_version' in params and params[u'oauth_version'] != u'1.0':
raise ValueError("Invalid OAuth version.")
# signature method must be valid
        if signature_method not in SIGNATURE_METHODS:
raise ValueError("Invalid signature method.")
# ensure client key is valid
if not self.check_client_key(client_key):
return False
# ensure resource owner key is valid and not expired
if not self.check_resource_owner_key(client_key, resource_owner_key):
return False
# ensure the nonce and timestamp haven't been used before
if not self.check_timestamp_and_nonce(timestamp, nonce):
return False
# FIXME: extract realm, then self.check_realm
# oauth_client parameters depend on client chosen signature method
# which may vary for each request, section 3.4
# HMAC-SHA1 and PLAINTEXT share parameters
if signature_method == SIGNATURE_RSA:
oauth_client = Client(client_key,
resource_owner_key=resource_owner_key,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
rsa_key=self.rsa_key, verifier=verifier)
else:
client_secret = self.get_client_secret(client_key)
resource_owner_secret = self.get_resource_owner_secret(
resource_owner_key)
oauth_client = Client(client_key,
client_secret=client_secret,
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
verifier=verifier)
request = Request(uri, http_method, body, headers)
request.oauth_params = params
client_signature = oauth_client.get_oauth_signature(request)
# FIXME: use near constant time string compare to avoid timing attacks
return client_signature == request_signature
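# A minimal subclass sketch (hypothetical in-memory stores; a real server
# would use persistent, expiring storage for secrets and nonces):
#
#     class ExampleServer(Server):
#         def get_client_secret(self, client_key):
#             return CLIENT_SECRETS[client_key]
#         def get_resource_owner_secret(self, resource_owner_key):
#             return TOKEN_SECRETS[resource_owner_key]
#         def check_client_key(self, client_key):
#             return client_key in CLIENT_SECRETS
#         def check_resource_owner_key(self, client_key, resource_owner_key):
#             return resource_owner_key in TOKEN_SECRETS
#         def check_timestamp_and_nonce(self, timestamp, nonce):
#             if (timestamp, nonce) in SEEN_NONCES:
#                 return False
#             SEEN_NONCES.add((timestamp, nonce))
#             return True
#
# On the FIXME above: where available (Python 2.7.7+), hmac.compare_digest
# offers a near-constant-time comparison, e.g.
# hmac.compare_digest(client_signature, request_signature).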
|
vitan/django | refs/heads/master | tests/model_forms/tests.py | 9 | from __future__ import unicode_literals
import datetime
import os
from decimal import Decimal
from unittest import skipUnless
from django import forms
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldError, ImproperlyConfigured,
)
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import ValidationError
from django.db import connection, models
from django.db.models.query import EmptyQuerySet
from django.forms.models import (
ModelFormMetaclass, construct_instance, fields_for_model, model_to_dict,
modelform_factory,
)
from django.template import Context, Template
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six
from django.utils._os import upath
from .models import (
Article, ArticleStatus, Author, Author1, BetterWriter, BigInt, Book,
Category, Character, Colour, ColourfulItem, CommaSeparatedInteger,
CustomErrorMessage, CustomFF, CustomFieldForExclusionModel, DateTimePost,
DerivedBook, DerivedPost, Document, ExplicitPK, FilePathModel,
FlexibleDatePost, Homepage, ImprovedArticle, ImprovedArticleWithParentLink,
Inventory, Person, Photo, Post, Price, Product, Publication,
PublicationDefaults, Student, StumpJoke, TextFile, Triple, Writer,
WriterProfile, test_images,
)
if test_images:
from .models import ImageFile, OptionalImageFile
class ImageFileForm(forms.ModelForm):
class Meta:
model = ImageFile
fields = '__all__'
class OptionalImageFileForm(forms.ModelForm):
class Meta:
model = OptionalImageFile
fields = '__all__'
class ProductForm(forms.ModelForm):
class Meta:
model = Product
fields = '__all__'
class PriceForm(forms.ModelForm):
class Meta:
model = Price
fields = '__all__'
class BookForm(forms.ModelForm):
class Meta:
model = Book
fields = '__all__'
class DerivedBookForm(forms.ModelForm):
class Meta:
model = DerivedBook
fields = '__all__'
class ExplicitPKForm(forms.ModelForm):
class Meta:
model = ExplicitPK
fields = ('key', 'desc',)
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = '__all__'
class DerivedPostForm(forms.ModelForm):
class Meta:
model = DerivedPost
fields = '__all__'
class CustomWriterForm(forms.ModelForm):
name = forms.CharField(required=False)
class Meta:
model = Writer
fields = '__all__'
class BaseCategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = '__all__'
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = '__all__'
class RoykoForm(forms.ModelForm):
class Meta:
model = Writer
fields = '__all__'
class ArticleStatusForm(forms.ModelForm):
class Meta:
model = ArticleStatus
fields = '__all__'
class InventoryForm(forms.ModelForm):
class Meta:
model = Inventory
fields = '__all__'
class SelectInventoryForm(forms.Form):
items = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
class CustomFieldForExclusionForm(forms.ModelForm):
class Meta:
model = CustomFieldForExclusionModel
fields = ['name', 'markup']
class TextFileForm(forms.ModelForm):
class Meta:
model = TextFile
fields = '__all__'
class BigIntForm(forms.ModelForm):
class Meta:
model = BigInt
fields = '__all__'
class ModelFormWithMedia(forms.ModelForm):
class Media:
js = ('/some/form/javascript',)
css = {
'all': ('/some/form/css',)
}
class Meta:
model = TextFile
fields = '__all__'
class CustomErrorMessageForm(forms.ModelForm):
name1 = forms.CharField(error_messages={'invalid': 'Form custom error message.'})
class Meta:
fields = '__all__'
model = CustomErrorMessage
class ModelFormBaseTest(TestCase):
def test_base_form(self):
self.assertEqual(list(BaseCategoryForm.base_fields),
['name', 'slug', 'url'])
def test_no_model_class(self):
class NoModelModelForm(forms.ModelForm):
pass
self.assertRaises(ValueError, NoModelModelForm)
def test_empty_fields_to_fields_for_model(self):
"""
An argument of fields=() to fields_for_model should return an empty dictionary
"""
field_dict = fields_for_model(Person, fields=())
self.assertEqual(len(field_dict), 0)
def test_empty_fields_on_modelform(self):
"""
No fields on a ModelForm should actually result in no fields.
"""
class EmptyPersonForm(forms.ModelForm):
class Meta:
model = Person
fields = ()
form = EmptyPersonForm()
self.assertEqual(len(form.fields), 0)
def test_empty_fields_to_construct_instance(self):
"""
No fields should be set on a model instance if construct_instance receives fields=().
"""
form = modelform_factory(Person, fields="__all__")({'name': 'John Doe'})
self.assertTrue(form.is_valid())
instance = construct_instance(form, Person(), fields=())
self.assertEqual(instance.name, '')
def test_blank_with_null_foreign_key_field(self):
"""
        #13776 -- A ModelForm whose model has a FK with null=False should
        still be valid when the corresponding form field sets required=False.
"""
class FormForTestingIsValid(forms.ModelForm):
class Meta:
model = Student
fields = '__all__'
def __init__(self, *args, **kwargs):
super(FormForTestingIsValid, self).__init__(*args, **kwargs)
self.fields['character'].required = False
char = Character.objects.create(username='user',
last_action=datetime.datetime.today())
data = {'study': 'Engineering'}
data2 = {'study': 'Engineering', 'character': char.pk}
# form is valid because required=False for field 'character'
f1 = FormForTestingIsValid(data)
self.assertTrue(f1.is_valid())
f2 = FormForTestingIsValid(data2)
self.assertTrue(f2.is_valid())
obj = f2.save()
self.assertEqual(obj.character, char)
def test_missing_fields_attribute(self):
message = (
"Creating a ModelForm without either the 'fields' attribute "
"or the 'exclude' attribute is prohibited; form "
"MissingFieldsForm needs updating."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
class MissingFieldsForm(forms.ModelForm):
class Meta:
model = Category
def test_extra_fields(self):
class ExtraFields(BaseCategoryForm):
some_extra_field = forms.BooleanField()
self.assertEqual(list(ExtraFields.base_fields),
['name', 'slug', 'url', 'some_extra_field'])
def test_extra_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'no-field')
except FieldError as e:
# Make sure the exception contains some reference to the
# field responsible for the problem.
self.assertIn('no-field', e.args[0])
else:
self.fail('Invalid "no-field" field not caught')
def test_extra_declared_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'age')
except FieldError:
self.fail('Declarative field raised FieldError incorrectly')
def test_extra_field_modelform_factory(self):
self.assertRaises(FieldError, modelform_factory,
Person, fields=['no-field', 'name'])
def test_replace_field(self):
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = '__all__'
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_replace_field_variant_2(self):
# Should have the same result as before,
# but 'fields' attribute specified differently
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = ['url']
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_replace_field_variant_3(self):
# Should have the same result as before,
# but 'fields' attribute specified differently
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = [] # url will still appear, since it is explicit above
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_override_field(self):
class WriterForm(forms.ModelForm):
book = forms.CharField(required=False)
class Meta:
model = Writer
fields = '__all__'
wf = WriterForm({'name': 'Richard Lockridge'})
self.assertTrue(wf.is_valid())
def test_limit_nonexistent_field(self):
expected_msg = 'Unknown field(s) (nonexistent) specified for Category'
with self.assertRaisesMessage(FieldError, expected_msg):
class InvalidCategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ['nonexistent']
def test_limit_fields_with_string(self):
expected_msg = "CategoryForm.Meta.fields cannot be a string. Did you mean to type: ('url',)?"
with self.assertRaisesMessage(TypeError, expected_msg):
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ('url') # note the missing comma
def test_exclude_fields(self):
class ExcludeFields(forms.ModelForm):
class Meta:
model = Category
exclude = ['url']
self.assertEqual(list(ExcludeFields.base_fields),
['name', 'slug'])
def test_exclude_nonexistent_field(self):
class ExcludeFields(forms.ModelForm):
class Meta:
model = Category
exclude = ['nonexistent']
self.assertEqual(list(ExcludeFields.base_fields),
['name', 'slug', 'url'])
def test_exclude_fields_with_string(self):
expected_msg = "CategoryForm.Meta.exclude cannot be a string. Did you mean to type: ('url',)?"
with self.assertRaisesMessage(TypeError, expected_msg):
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = ('url') # note the missing comma
def test_exclude_and_validation(self):
# This Price instance generated by this form is not valid because the quantity
# field is required, but the form is valid because the field is excluded from
# the form. This is for backwards compatibility.
class PriceFormWithoutQuantity(forms.ModelForm):
class Meta:
model = Price
exclude = ('quantity',)
form = PriceFormWithoutQuantity({'price': '6.00'})
self.assertTrue(form.is_valid())
price = form.save(commit=False)
with self.assertRaises(ValidationError):
price.full_clean()
# The form should not validate fields that it doesn't contain even if they are
# specified using 'fields', not 'exclude'.
class PriceFormWithoutQuantity(forms.ModelForm):
class Meta:
model = Price
fields = ('price',)
form = PriceFormWithoutQuantity({'price': '6.00'})
self.assertTrue(form.is_valid())
# The form should still have an instance of a model that is not complete and
# not saved into a DB yet.
self.assertEqual(form.instance.price, Decimal('6.00'))
self.assertIsNone(form.instance.quantity)
self.assertIsNone(form.instance.pk)
def test_confused_form(self):
class ConfusedForm(forms.ModelForm):
""" Using 'fields' *and* 'exclude'. Not sure why you'd want to do
this, but uh, "be liberal in what you accept" and all.
"""
class Meta:
model = Category
fields = ['name', 'url']
exclude = ['url']
self.assertEqual(list(ConfusedForm.base_fields),
['name'])
def test_mixmodel_form(self):
class MixModelForm(BaseCategoryForm):
""" Don't allow more than one 'model' definition in the
inheritance hierarchy. Technically, it would generate a valid
form, but the fact that the resulting save method won't deal with
multiple objects is likely to trip up people not familiar with the
mechanics.
"""
class Meta:
model = Article
fields = '__all__'
# MixModelForm is now an Article-related thing, because MixModelForm.Meta
# overrides BaseCategoryForm.Meta.
self.assertEqual(
list(MixModelForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_article_form(self):
self.assertEqual(
list(ArticleForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_bad_form(self):
# First class with a Meta class wins...
class BadForm(ArticleForm, BaseCategoryForm):
pass
self.assertEqual(
list(BadForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_invalid_meta_model(self):
class InvalidModelForm(forms.ModelForm):
class Meta:
pass # no model
# Can't create new form
with self.assertRaises(ValueError):
InvalidModelForm()
# Even if you provide a model instance
with self.assertRaises(ValueError):
InvalidModelForm(instance=Category)
def test_subcategory_form(self):
class SubCategoryForm(BaseCategoryForm):
""" Subclassing without specifying a Meta on the class will use
the parent's Meta (or the first parent in the MRO if there are
multiple parent classes).
"""
pass
self.assertEqual(list(SubCategoryForm.base_fields),
['name', 'slug', 'url'])
def test_subclassmeta_form(self):
class SomeCategoryForm(forms.ModelForm):
checkbox = forms.BooleanField()
class Meta:
model = Category
fields = '__all__'
class SubclassMeta(SomeCategoryForm):
""" We can also subclass the Meta inner class to change the fields
list.
"""
class Meta(SomeCategoryForm.Meta):
exclude = ['url']
self.assertHTMLEqual(
str(SubclassMeta()),
"""<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_checkbox">Checkbox:</label></th><td><input type="checkbox" name="checkbox" id="id_checkbox" /></td></tr>"""
)
def test_orderfields_form(self):
class OrderFields(forms.ModelForm):
class Meta:
model = Category
fields = ['url', 'name']
self.assertEqual(list(OrderFields.base_fields),
['url', 'name'])
self.assertHTMLEqual(
str(OrderFields()),
"""<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>"""
)
def test_orderfields2_form(self):
class OrderFields2(forms.ModelForm):
class Meta:
model = Category
fields = ['slug', 'url', 'name']
exclude = ['url']
self.assertEqual(list(OrderFields2.base_fields),
['slug', 'name'])
class FieldOverridesByFormMetaForm(forms.ModelForm):
class Meta:
model = Category
fields = ['name', 'url', 'slug']
widgets = {
'name': forms.Textarea,
'url': forms.TextInput(attrs={'class': 'url'})
}
labels = {
'name': 'Title',
}
help_texts = {
'slug': 'Watch out! Letters, numbers, underscores and hyphens only.',
}
error_messages = {
'slug': {
'invalid': (
"Didn't you read the help text? "
"We said letters, numbers, underscores and hyphens only!"
)
}
}
field_classes = {
'url': forms.URLField,
}
class TestFieldOverridesByFormMeta(TestCase):
def test_widget_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertHTMLEqual(
str(form['name']),
'<textarea id="id_name" rows="10" cols="40" name="name" maxlength="20"></textarea>',
)
self.assertHTMLEqual(
str(form['url']),
'<input id="id_url" type="text" class="url" name="url" maxlength="40" />',
)
self.assertHTMLEqual(
str(form['slug']),
'<input id="id_slug" type="text" name="slug" maxlength="20" />',
)
def test_label_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertHTMLEqual(
str(form['name'].label_tag()),
'<label for="id_name">Title:</label>',
)
self.assertHTMLEqual(
str(form['url'].label_tag()),
'<label for="id_url">The URL:</label>',
)
self.assertHTMLEqual(
str(form['slug'].label_tag()),
'<label for="id_slug">Slug:</label>',
)
def test_help_text_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertEqual(
form['slug'].help_text,
'Watch out! Letters, numbers, underscores and hyphens only.',
)
def test_error_messages_overrides(self):
form = FieldOverridesByFormMetaForm(data={
'name': 'Category',
'url': 'http://www.example.com/category/',
'slug': '!%#*@',
})
form.full_clean()
error = [
"Didn't you read the help text? "
"We said letters, numbers, underscores and hyphens only!",
]
self.assertEqual(form.errors, {'slug': error})
def test_field_type_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertIs(Category._meta.get_field('url').__class__, models.CharField)
self.assertIsInstance(form.fields['url'], forms.URLField)
class IncompleteCategoryFormWithFields(forms.ModelForm):
"""
A form that replaces the model's url field with a custom one. This should
prevent the model field's validation from being called.
"""
url = forms.CharField(required=False)
class Meta:
fields = ('name', 'slug')
model = Category
class IncompleteCategoryFormWithExclude(forms.ModelForm):
"""
A form that replaces the model's url field with a custom one. This should
prevent the model field's validation from being called.
"""
url = forms.CharField(required=False)
class Meta:
exclude = ['url']
model = Category
class ValidationTest(TestCase):
def test_validates_with_replaced_field_not_specified(self):
form = IncompleteCategoryFormWithFields(data={'name': 'some name', 'slug': 'some-slug'})
assert form.is_valid()
def test_validates_with_replaced_field_excluded(self):
form = IncompleteCategoryFormWithExclude(data={'name': 'some name', 'slug': 'some-slug'})
assert form.is_valid()
def test_notrequired_overrides_notblank(self):
form = CustomWriterForm({})
assert form.is_valid()
class UniqueTest(TestCase):
"""
unique/unique_together validation.
"""
def setUp(self):
self.writer = Writer.objects.create(name='Mike Royko')
def test_simple_unique(self):
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertTrue(form.is_valid())
obj = form.save()
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Product with this Slug already exists.'])
form = ProductForm({'slug': 'teddy-bear-blue'}, instance=obj)
self.assertTrue(form.is_valid())
def test_unique_together(self):
"""ModelForm test of unique_together constraint"""
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertTrue(form.is_valid())
form.save()
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Price with this Price and Quantity already exists.'])
def test_multiple_field_unique_together(self):
"""
When the same field is involved in multiple unique_together
constraints, we need to make sure we don't remove the data for it
before doing all the validation checking (not just failing after
the first one).
"""
class TripleForm(forms.ModelForm):
class Meta:
model = Triple
fields = '__all__'
Triple.objects.create(left=1, middle=2, right=3)
form = TripleForm({'left': '1', 'middle': '2', 'right': '3'})
self.assertFalse(form.is_valid())
form = TripleForm({'left': '1', 'middle': '3', 'right': '1'})
self.assertTrue(form.is_valid())
@skipUnlessDBFeature('supports_nullable_unique_constraints')
def test_unique_null(self):
title = 'I May Be Wrong But I Doubt It'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
def test_inherited_unique(self):
title = 'Boss'
Book.objects.create(title=title, author=self.writer, special_id=1)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'special_id': '1', 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['special_id'], ['Book with this Special id already exists.'])
def test_inherited_unique_together(self):
title = 'Boss'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = DerivedBookForm({'title': title, 'author': self.writer.pk, 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
def test_abstract_inherited_unique(self):
title = 'Boss'
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'isbn': isbn})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['isbn'], ['Derived book with this Isbn already exists.'])
def test_abstract_inherited_unique_together(self):
title = 'Boss'
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({
'title': 'Other',
'author': self.writer.pk,
'isbn': '9876',
'suffix1': '0',
'suffix2': '0'
})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'],
['Derived book with this Suffix1 and Suffix2 already exists.'])
def test_explicitpk_unspecified(self):
"""Test for primary_key being in the form and failing validation."""
form = ExplicitPKForm({'key': '', 'desc': ''})
self.assertFalse(form.is_valid())
def test_explicitpk_unique(self):
"""Ensure keys and blank character strings are tested for uniqueness."""
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertTrue(form.is_valid())
form.save()
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 3)
self.assertEqual(form.errors['__all__'], ['Explicit pk with this Key and Desc already exists.'])
self.assertEqual(form.errors['desc'], ['Explicit pk with this Desc already exists.'])
self.assertEqual(form.errors['key'], ['Explicit pk with this Key already exists.'])
def test_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = PostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = PostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = PostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = PostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released"})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['posted'], ['This field is required.'])
def test_unique_for_date_in_exclude(self):
"""
        If the date for unique_for_* constraints is excluded from the
        ModelForm (in this case 'posted' has editable=False), then the
        constraint should be ignored.
"""
class DateTimePostForm(forms.ModelForm):
class Meta:
model = DateTimePost
fields = '__all__'
DateTimePost.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally",
posted=datetime.datetime(2008, 9, 3, 10, 10, 1))
# 'title' has unique_for_date='posted'
form = DateTimePostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
# 'slug' has unique_for_year='posted'
form = DateTimePostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertTrue(form.is_valid())
# 'subtitle' has unique_for_month='posted'
form = DateTimePostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertTrue(form.is_valid())
def test_inherited_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = DerivedPostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = DerivedPostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = DerivedPostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
def test_unique_for_date_with_nullable_date(self):
class FlexDatePostForm(forms.ModelForm):
class Meta:
model = FlexibleDatePost
fields = '__all__'
p = FlexibleDatePost.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = FlexDatePostForm({'title': "Django 1.0 is released"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'slug': "Django 1.0"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0"}, instance=p)
self.assertTrue(form.is_valid())
def test_override_unique_message(self):
class CustomProductForm(ProductForm):
class Meta(ProductForm.Meta):
error_messages = {
'slug': {
'unique': "%(model_name)s's %(field_label)s not unique.",
}
}
Product.objects.create(slug='teddy-bear-blue')
form = CustomProductForm({'slug': 'teddy-bear-blue'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ["Product's Slug not unique."])
def test_override_unique_together_message(self):
class CustomPriceForm(PriceForm):
class Meta(PriceForm.Meta):
error_messages = {
NON_FIELD_ERRORS: {
'unique_together': "%(model_name)s's %(field_labels)s not unique.",
}
}
Price.objects.create(price=6.00, quantity=1)
form = CustomPriceForm({'price': '6.00', 'quantity': '1'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors[NON_FIELD_ERRORS], ["Price's Price and Quantity not unique."])
def test_override_unique_for_date_message(self):
class CustomPostForm(PostForm):
class Meta(PostForm.Meta):
error_messages = {
'title': {
'unique_for_date': "%(model_name)s's %(field_label)s not unique for %(date_field_label)s date.",
}
}
Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = CustomPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ["Post's Title not unique for Posted date."])
class ModelToDictTests(TestCase):
"""
Tests for forms.models.model_to_dict
"""
def test_model_to_dict_many_to_many(self):
categories = [
Category(name='TestName1', slug='TestName1', url='url1'),
Category(name='TestName2', slug='TestName2', url='url2'),
Category(name='TestName3', slug='TestName3', url='url3')
]
for c in categories:
c.save()
writer = Writer(name='Test writer')
writer.save()
art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=writer,
article='Hello.'
)
art.save()
for c in categories:
art.categories.add(c)
art.save()
with self.assertNumQueries(1):
d = model_to_dict(art)
# Ensure all many-to-many categories appear in model_to_dict
for c in categories:
self.assertIn(c.pk, d['categories'])
# Ensure many-to-many relation appears as a list
self.assertIsInstance(d['categories'], list)
def test_reuse_prefetched(self):
# model_to_dict should not hit the database if it can reuse
# the data populated by prefetch_related.
categories = [
Category(name='TestName1', slug='TestName1', url='url1'),
Category(name='TestName2', slug='TestName2', url='url2'),
Category(name='TestName3', slug='TestName3', url='url3')
]
for c in categories:
c.save()
writer = Writer(name='Test writer')
writer.save()
art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=writer,
article='Hello.'
)
art.save()
for c in categories:
art.categories.add(c)
art = Article.objects.prefetch_related('categories').get(pk=art.pk)
with self.assertNumQueries(0):
d = model_to_dict(art)
# Ensure all many-to-many categories appear in model_to_dict
for c in categories:
self.assertIn(c.pk, d['categories'])
# Ensure many-to-many relation appears as a list
self.assertIsInstance(d['categories'], list)
class ModelFormBasicTests(TestCase):
def create_basic_data(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third test", slug="third-test", url="third")
self.w_royko = Writer.objects.create(name='Mike Royko')
self.w_woodward = Writer.objects.create(name='Bob Woodward')
def test_base_form(self):
self.assertEqual(Category.objects.count(), 0)
f = BaseCategoryForm()
self.assertHTMLEqual(
str(f),
"""<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>"""
)
self.assertHTMLEqual(
str(f.as_ul()),
"""<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
<li><label for="id_slug">Slug:</label> <input id="id_slug" type="text" name="slug" maxlength="20" /></li>
<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>"""
)
self.assertHTMLEqual(
str(f["name"]),
"""<input id="id_name" type="text" name="name" maxlength="20" />""")
def test_auto_id(self):
f = BaseCategoryForm(auto_id=False)
self.assertHTMLEqual(
str(f.as_ul()),
"""<li>Name: <input type="text" name="name" maxlength="20" /></li>
<li>Slug: <input type="text" name="slug" maxlength="20" /></li>
<li>The URL: <input type="text" name="url" maxlength="40" /></li>"""
)
def test_initial_values(self):
self.create_basic_data()
# Initial values can be provided for model forms
f = ArticleForm(
auto_id=False,
initial={
'headline': 'Your headline here',
'categories': [str(self.c1.id), str(self.c2.id)]
})
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s" selected="selected">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
# When the ModelForm is passed an instance, that instance's current values are
# inserted as 'initial' data in each Field.
f = RoykoForm(auto_id=False, instance=self.w_royko)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')
art = Article.objects.create(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=self.w_royko,
article='Hello.'
)
art_id_1 = art.id
f = ArticleForm(auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="test-article" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
f = ArticleForm({
'headline': 'Test headline',
'slug': 'test-headline',
'pub_date': '1984-02-06',
'writer': six.text_type(self.w_royko.pk),
'article': 'Hello.'
}, instance=art)
self.assertEqual(f.errors, {})
self.assertTrue(f.is_valid())
test_art = f.save()
self.assertEqual(test_art.id, art_id_1)
test_art = Article.objects.get(id=art_id_1)
self.assertEqual(test_art.headline, 'Test headline')
def test_m2m_initial_callable(self):
"""
Regression for #10349: A callable can be provided as the initial value for an m2m field
"""
self.maxDiff = 1200
self.create_basic_data()
# Set up a callable initial value
def formfield_for_dbfield(db_field, **kwargs):
if db_field.name == 'categories':
kwargs['initial'] = lambda: Category.objects.all().order_by('name')[:2]
return db_field.formfield(**kwargs)
# Create a ModelForm, instantiate it, and check that the output is as expected
ModelForm = modelform_factory(Article, fields=['headline', 'categories'],
formfield_callback=formfield_for_dbfield)
form = ModelForm()
self.assertHTMLEqual(form.as_ul(), """<li><label for="id_headline">Headline:</label> <input id="id_headline" type="text" name="headline" maxlength="50" /></li>
<li><label for="id_categories">Categories:</label> <select multiple="multiple" name="categories" id="id_categories">
<option value="%d" selected="selected">Entertainment</option>
<option value="%d" selected="selected">It&39;s a test</option>
<option value="%d">Third test</option>
</select></li>"""
% (self.c1.pk, self.c2.pk, self.c3.pk))
def test_basic_creation(self):
self.assertEqual(Category.objects.count(), 0)
f = BaseCategoryForm({'name': 'Entertainment',
'slug': 'entertainment',
'url': 'entertainment'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['name'], 'Entertainment')
self.assertEqual(f.cleaned_data['slug'], 'entertainment')
self.assertEqual(f.cleaned_data['url'], 'entertainment')
c1 = f.save()
# Testing whether the same object is returned from the
# ORM... not the fastest way...
self.assertEqual(Category.objects.count(), 1)
self.assertEqual(c1, Category.objects.all()[0])
self.assertEqual(c1.name, "Entertainment")
def test_save_commit_false(self):
# If you call save() with commit=False, then it will return an object that
# hasn't yet been saved to the database. In this case, it's up to you to call
# save() on the resulting model instance.
f = BaseCategoryForm({'name': 'Third test', 'slug': 'third-test', 'url': 'third'})
self.assertTrue(f.is_valid())
c1 = f.save(commit=False)
self.assertEqual(c1.name, "Third test")
self.assertEqual(Category.objects.count(), 0)
c1.save()
self.assertEqual(Category.objects.count(), 1)
def test_save_with_data_errors(self):
# If you call save() with invalid data, you'll get a ValueError.
f = BaseCategoryForm({'name': '', 'slug': 'not a slug!', 'url': 'foo'})
self.assertEqual(f.errors['name'], ['This field is required.'])
self.assertEqual(f.errors['slug'], ["Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."])
self.assertEqual(f.cleaned_data, {'url': 'foo'})
with self.assertRaises(ValueError):
f.save()
f = BaseCategoryForm({'name': '', 'slug': '', 'url': 'foo'})
with self.assertRaises(ValueError):
f.save()
def test_multi_fields(self):
self.create_basic_data()
self.maxDiff = None
# ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any
# fields with the 'choices' attribute are represented by a ChoiceField.
f = ArticleForm(auto_id=False)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Slug:</th><td><input type="text" name="slug" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
<tr><th>Writer:</th><td><select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></td></tr>
<tr><th>Article:</th><td><textarea rows="10" cols="40" name="article"></textarea></td></tr>
<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></td></tr>
<tr><th>Status:</th><td><select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></td></tr>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
# Add some categories and test the many-to-many form output.
new_art = Article.objects.create(
article="Hello.", headline="New headline", slug="new-headline",
pub_date=datetime.date(1988, 1, 4), writer=self.w_royko)
new_art.categories.add(Category.objects.get(name='Entertainment'))
self.assertQuerysetEqual(new_art.categories.all(), ["Entertainment"])
f = ArticleForm(auto_id=False, instance=new_art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
def test_subset_fields(self):
# You can restrict a form to a subset of the complete list of fields
# by providing a 'fields' argument. If you try to save a
# model created with such a form, you need to ensure that the fields
# that are _not_ on the form have default values, or are allowed to have
# a value of None. If a field isn't specified on a form, the object created
# from the form can't provide a value for that field!
class PartialArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = ('headline', 'pub_date')
f = PartialArticleForm(auto_id=False)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>''')
# You can create a form over a subset of the available fields
# by specifying a 'fields' argument to form_for_instance.
class PartialArticleFormWithSlug(forms.ModelForm):
class Meta:
model = Article
fields = ('headline', 'slug', 'pub_date')
w_royko = Writer.objects.create(name='Mike Royko')
art = Article.objects.create(
article="Hello.", headline="New headline", slug="new-headline",
pub_date=datetime.date(1988, 1, 4), writer=w_royko)
f = PartialArticleFormWithSlug({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04'
}, auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>''')
self.assertTrue(f.is_valid())
new_art = f.save()
self.assertEqual(new_art.id, art.id)
new_art = Article.objects.get(id=art.id)
self.assertEqual(new_art.headline, 'New headline')
def test_m2m_editing(self):
self.create_basic_data()
form_data = {
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04',
'writer': six.text_type(self.w_royko.pk),
'article': 'Hello.',
'categories': [six.text_type(self.c1.id), six.text_type(self.c2.id)]
}
# Create a new article, with categories, via the form.
f = ArticleForm(form_data)
new_art = f.save()
new_art = Article.objects.get(id=new_art.id)
art_id_1 = new_art.id
self.assertQuerysetEqual(new_art.categories.order_by('name'),
["Entertainment", "It's a test"])
# Now, submit form data with no categories. This deletes the existing categories.
form_data['categories'] = []
f = ArticleForm(form_data, instance=new_art)
new_art = f.save()
self.assertEqual(new_art.id, art_id_1)
new_art = Article.objects.get(id=art_id_1)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Create a new article, with no categories, via the form.
f = ArticleForm(form_data)
new_art = f.save()
art_id_2 = new_art.id
self.assertNotIn(art_id_2, (None, art_id_1))
new_art = Article.objects.get(id=art_id_2)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Create a new article, with categories, via the form, but use commit=False.
# The m2m data won't be saved until save_m2m() is invoked on the form.
form_data['categories'] = [six.text_type(self.c1.id), six.text_type(self.c2.id)]
f = ArticleForm(form_data)
new_art = f.save(commit=False)
# Manually save the instance
new_art.save()
art_id_3 = new_art.id
self.assertNotIn(art_id_3, (None, art_id_1, art_id_2))
# The instance doesn't have m2m data yet
new_art = Article.objects.get(id=art_id_3)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Save the m2m data on the form
f.save_m2m()
self.assertQuerysetEqual(new_art.categories.order_by('name'),
["Entertainment", "It's a test"])
def test_custom_form_fields(self):
# Here, we define a custom ModelForm. Because it happens to have the same fields as
# the Category model, we can just call the form's save() to apply its changes to an
# existing Category instance.
class ShortCategory(forms.ModelForm):
name = forms.CharField(max_length=5)
slug = forms.CharField(max_length=5)
url = forms.CharField(max_length=3)
class Meta:
model = Category
fields = '__all__'
cat = Category.objects.create(name='Third test')
form = ShortCategory({'name': 'Third', 'slug': 'third', 'url': '3rd'}, instance=cat)
self.assertEqual(form.save().name, 'Third')
self.assertEqual(Category.objects.get(id=cat.id).name, 'Third')
def test_runtime_choicefield_populated(self):
self.maxDiff = None
# Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
# at runtime, based on the data in the database when the form is displayed, not
# the data in the database when the form is instantiated.
self.create_basic_data()
f = ArticleForm(auto_id=False)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> </li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
c4 = Category.objects.create(name='Fourth', url='4th')
w_bernstein = Writer.objects.create(name='Carl Bernstein')
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
<option value="%s">Fourth</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, w_bernstein.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk, c4.pk))
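# The querysets backing the writer and categories fields are re-evaluated on
# each render, which is why the newly created Writer and Category appear in
# the second as_ul() output without re-instantiating the form.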
class ModelChoiceFieldTests(TestCase):
def setUp(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third", slug="third-test", url="third")
# ModelChoiceField ############################################################
def test_modelchoicefield(self):
f = forms.ModelChoiceField(Category.objects.all())
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test"),
(self.c3.pk, 'Third')])
with self.assertRaises(ValidationError):
f.clean('')
with self.assertRaises(ValidationError):
f.clean(None)
with self.assertRaises(ValidationError):
f.clean(0)
# Invalid types that require TypeError to be caught (#22808).
with self.assertRaises(ValidationError):
f.clean([['fail']])
with self.assertRaises(ValidationError):
f.clean([{'foo': 'bar'}])
self.assertEqual(f.clean(self.c2.id).name, "It's a test")
self.assertEqual(f.clean(self.c3.id).name, 'Third')
# Add a Category object *after* the ModelChoiceField has already been
# instantiated. This proves that clean() checks the database at validation
# time rather than caching choices when the field is instantiated.
c4 = Category.objects.create(name='Fourth', url='4th')
self.assertEqual(f.clean(c4.id).name, 'Fourth')
# Delete a Category object *after* the ModelChoiceField has already been
# instantiated. This proves that clean() checks the database at validation
# time rather than caching choices when the field is instantiated.
Category.objects.get(url='4th').delete()
with self.assertRaises(ValidationError):
f.clean(c4.id)
def test_modelchoicefield_choices(self):
f = forms.ModelChoiceField(Category.objects.filter(pk=self.c1.id), required=False)
self.assertIsNone(f.clean(''))
self.assertEqual(f.clean(str(self.c1.id)).name, "Entertainment")
with self.assertRaises(ValidationError):
f.clean('100')
# len can be called on choices
self.assertEqual(len(f.choices), 2)
# queryset can be changed after the field is created.
f.queryset = Category.objects.exclude(name='Third')
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
self.assertEqual(f.clean(self.c2.id).name, "It's a test")
with self.assertRaises(ValidationError):
f.clean(self.c3.id)
# check that we can safely iterate choices repeatedly
gen_one = list(f.choices)
gen_two = f.choices
self.assertEqual(gen_one[2], (self.c2.pk, "It's a test"))
self.assertEqual(list(gen_two), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
# check that we can override the label_from_instance method to print custom labels (#4620)
f.queryset = Category.objects.all()
f.label_from_instance = lambda obj: "category " + str(obj)
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'category Entertainment'),
(self.c2.pk, "category It's a test"),
(self.c3.pk, 'category Third')])
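# A reusable variant of the same customization (an illustrative sketch, not
# part of this test suite) is to override label_from_instance in a subclass
# instead of patching the field instance:
#
#     class CategoryChoiceField(forms.ModelChoiceField):
#         def label_from_instance(self, obj):
#             return "category " + str(obj)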
def test_modelchoicefield_11183(self):
"""
Regression test for ticket #11183.
"""
class ModelChoiceForm(forms.Form):
category = forms.ModelChoiceField(Category.objects.all())
form1 = ModelChoiceForm()
field1 = form1.fields['category']
# To allow the widget to change the queryset of field1.widget.choices correctly,
# without affecting other forms, the following must hold:
self.assertIsNot(field1, ModelChoiceForm.base_fields['category'])
self.assertIs(field1.widget.choices.field, field1)
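# This holds because each form instance works on deep copies of base_fields,
# so mutating field1 (or the queryset behind its widget's choices) cannot
# leak into other forms built from the same class.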
def test_modelchoicefield_22745(self):
"""
#22745 -- Make sure that ModelChoiceField with RadioSelect widget
doesn't produce unnecessary db queries when accessing its BoundField's
attrs.
"""
class ModelChoiceForm(forms.Form):
category = forms.ModelChoiceField(Category.objects.all(), widget=forms.RadioSelect)
form = ModelChoiceForm()
field = form['category'] # BoundField
template = Template('{{ field.name }}{{ field }}{{ field.help_text }}')
with self.assertNumQueries(1):
template.render(Context({'field': field}))
class ModelMultipleChoiceFieldTests(TestCase):
def setUp(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third", slug="third-test", url="third")
def test_model_multiple_choice_field(self):
f = forms.ModelMultipleChoiceField(Category.objects.all())
self.assertEqual(list(f.choices), [
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test"),
(self.c3.pk, 'Third')])
with self.assertRaises(ValidationError):
f.clean(None)
with self.assertRaises(ValidationError):
f.clean([])
self.assertQuerysetEqual(f.clean([self.c1.id]), ["Entertainment"])
self.assertQuerysetEqual(f.clean([self.c2.id]), ["It's a test"])
self.assertQuerysetEqual(f.clean([str(self.c1.id)]), ["Entertainment"])
self.assertQuerysetEqual(f.clean([str(self.c1.id), str(self.c2.id)]),
["Entertainment", "It's a test"], ordered=False)
self.assertQuerysetEqual(f.clean([self.c1.id, str(self.c2.id)]),
["Entertainment", "It's a test"], ordered=False)
self.assertQuerysetEqual(f.clean((self.c1.id, str(self.c2.id))),
["Entertainment", "It's a test"], ordered=False)
with self.assertRaises(ValidationError):
f.clean(['100'])
with self.assertRaises(ValidationError):
f.clean('hello')
with self.assertRaises(ValidationError):
f.clean(['fail'])
# Invalid types that require TypeError to be caught (#22808).
with self.assertRaises(ValidationError):
f.clean([['fail']])
with self.assertRaises(ValidationError):
f.clean([{'foo': 'bar'}])
# Add a Category object *after* the ModelMultipleChoiceField has already been
# instantiated. This proves that clean() checks the database at validation
# time rather than caching choices when the field is instantiated.
# Note: use an id of 1006 here, since tests that run before this one may
# create categories with primary keys up to 6; pick a number that won't
# conflict.
c6 = Category.objects.create(id=1006, name='Sixth', url='6th')
self.assertQuerysetEqual(f.clean([c6.id]), ["Sixth"])
# Delete a Category object *after* the ModelMultipleChoiceField has already been
# instantiated. This proves that clean() checks the database at validation
# time rather than caching choices when the field is instantiated.
Category.objects.get(url='6th').delete()
with self.assertRaises(ValidationError):
f.clean([c6.id])
def test_model_multiple_choice_required_false(self):
f = forms.ModelMultipleChoiceField(Category.objects.all(), required=False)
self.assertIsInstance(f.clean([]), EmptyQuerySet)
self.assertIsInstance(f.clean(()), EmptyQuerySet)
with self.assertRaises(ValidationError):
f.clean(['0'])
with self.assertRaises(ValidationError):
f.clean([str(self.c3.id), '0'])
with self.assertRaises(ValidationError):
f.clean([str(self.c1.id), '0'])
# queryset can be changed after the field is created.
f.queryset = Category.objects.exclude(name='Third')
self.assertEqual(list(f.choices), [
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
self.assertQuerysetEqual(f.clean([self.c2.id]), ["It's a test"])
with self.assertRaises(ValidationError):
f.clean([self.c3.id])
with self.assertRaises(ValidationError):
f.clean([str(self.c2.id), str(self.c3.id)])
f.queryset = Category.objects.all()
f.label_from_instance = lambda obj: "multicategory " + str(obj)
self.assertEqual(list(f.choices), [
(self.c1.pk, 'multicategory Entertainment'),
(self.c2.pk, "multicategory It's a test"),
(self.c3.pk, 'multicategory Third')])
def test_model_multiple_choice_number_of_queries(self):
"""
Test that ModelMultipleChoiceField does O(1) queries instead of
O(n) (#10156).
"""
persons = [Writer.objects.create(name="Person %s" % i) for i in range(30)]
f = forms.ModelMultipleChoiceField(queryset=Writer.objects.all())
self.assertNumQueries(1, f.clean, [p.pk for p in persons[1:11:2]])
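# The single query comes from validating all selected pks with one pk__in
# filter rather than issuing one lookup per submitted value.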
def test_model_multiple_choice_run_validators(self):
"""
Test that ModelMultipleChoiceField runs the given validators (#14144).
"""
for i in range(30):
Writer.objects.create(name="Person %s" % i)
self._validator_run = False
def my_validator(value):
self._validator_run = True
f = forms.ModelMultipleChoiceField(queryset=Writer.objects.all(),
validators=[my_validator])
f.clean([p.pk for p in Writer.objects.all()[8:9]])
self.assertTrue(self._validator_run)
def test_model_multiple_choice_show_hidden_initial(self):
"""
Test support of show_hidden_initial by ModelMultipleChoiceField.
"""
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(show_hidden_initial=True,
queryset=Writer.objects.all())
person1 = Writer.objects.create(name="Person 1")
person2 = Writer.objects.create(name="Person 2")
form = WriterForm(initial={'persons': [person1, person2]},
data={'initial-persons': [str(person1.pk), str(person2.pk)],
'persons': [str(person1.pk), str(person2.pk)]})
self.assertTrue(form.is_valid())
self.assertFalse(form.has_changed())
form = WriterForm(initial={'persons': [person1, person2]},
data={'initial-persons': [str(person1.pk), str(person2.pk)],
'persons': [str(person2.pk)]})
self.assertTrue(form.is_valid())
self.assertTrue(form.has_changed())
def test_model_multiple_choice_field_22745(self):
"""
#22745 -- Make sure that ModelMultipleChoiceField with
CheckboxSelectMultiple widget doesn't produce unnecessary db queries
when accessing its BoundField's attrs.
"""
class ModelMultipleChoiceForm(forms.Form):
categories = forms.ModelMultipleChoiceField(Category.objects.all(), widget=forms.CheckboxSelectMultiple)
form = ModelMultipleChoiceForm()
field = form['categories'] # BoundField
template = Template('{{ field.name }}{{ field }}{{ field.help_text }}')
with self.assertNumQueries(1):
template.render(Context({'field': field}))
def test_show_hidden_initial_changed_queries_efficiently(self):
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(
show_hidden_initial=True, queryset=Writer.objects.all())
writers = (Writer.objects.create(name=str(x)) for x in range(0, 50))
writer_pks = tuple(x.pk for x in writers)
form = WriterForm(data={'initial-persons': writer_pks})
with self.assertNumQueries(1):
self.assertTrue(form.has_changed())
def test_clean_does_deduplicate_values(self):
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(queryset=Writer.objects.all())
person1 = Writer.objects.create(name="Person 1")
form = WriterForm(data={})
queryset = form.fields['persons'].clean([str(person1.pk)] * 50)
sql, params = queryset.query.sql_with_params()
self.assertEqual(len(params), 1)
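# Only one parameter reaches the SQL because the field collapses the
# submitted values into a set before building its pk__in lookup.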
class ModelOneToOneFieldTests(TestCase):
def test_modelform_onetoonefield(self):
class ImprovedArticleForm(forms.ModelForm):
class Meta:
model = ImprovedArticle
fields = '__all__'
class ImprovedArticleWithParentLinkForm(forms.ModelForm):
class Meta:
model = ImprovedArticleWithParentLink
fields = '__all__'
self.assertEqual(list(ImprovedArticleForm.base_fields), ['article'])
self.assertEqual(list(ImprovedArticleWithParentLinkForm.base_fields), [])
def test_modelform_subclassed_model(self):
class BetterWriterForm(forms.ModelForm):
class Meta:
# BetterWriter model is a subclass of Writer with an additional `score` field
model = BetterWriter
fields = '__all__'
bw = BetterWriter.objects.create(name='Joe Better', score=10)
self.assertEqual(sorted(model_to_dict(bw)),
['id', 'name', 'score', 'writer_ptr'])
form = BetterWriterForm({'name': 'Some Name', 'score': 12})
self.assertTrue(form.is_valid())
bw2 = form.save()
self.assertEqual(bw2.score, 12)
def test_onetoonefield(self):
class WriterProfileForm(forms.ModelForm):
class Meta:
# WriterProfile has a OneToOneField to Writer
model = WriterProfile
fields = '__all__'
self.w_royko = Writer.objects.create(name='Mike Royko')
self.w_woodward = Writer.objects.create(name='Bob Woodward')
form = WriterProfileForm()
self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" id="id_age" min="0" /></p>''' % (self.w_woodward.pk, self.w_royko.pk))
data = {
'writer': six.text_type(self.w_woodward.pk),
'age': '65',
}
form = WriterProfileForm(data)
instance = form.save()
self.assertEqual(six.text_type(instance), 'Bob Woodward is 65')
form = WriterProfileForm(instance=instance)
self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="">---------</option>
<option value="%s" selected="selected">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" value="65" id="id_age" min="0" /></p>''' % (self.w_woodward.pk, self.w_royko.pk))
def test_assignment_of_none(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=datetime.date(1991, 8, 22))
author = Author.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication': '', 'full_name': 'John Doe'}, instance=author)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['publication'], None)
author = form.save()
# The author object returned from the form still retains the original
# publication object, which is why we retrieve it from the database again.
new_author = Author.objects.get(pk=author.pk)
self.assertEqual(new_author.publication, None)
def test_assignment_of_none_null_false(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author1
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=datetime.date(1991, 8, 22))
author = Author1.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication': '', 'full_name': 'John Doe'}, instance=author)
self.assertFalse(form.is_valid())
class FileAndImageFieldTests(TestCase):
def test_clean_false(self):
"""
If the ``clean`` method on a non-required FileField receives False as
the data (meaning clear the field value), it returns False, regardless
of the value of ``initial``.
"""
f = forms.FileField(required=False)
self.assertEqual(f.clean(False), False)
self.assertEqual(f.clean(False, 'initial'), False)
def test_clean_false_required(self):
"""
If the ``clean`` method on a required FileField receives False as the
data, it has the same effect as None: initial is returned if non-empty,
otherwise the validation catches the lack of a required value.
"""
f = forms.FileField(required=True)
self.assertEqual(f.clean(False, 'initial'), 'initial')
self.assertRaises(ValidationError, f.clean, False)
def test_full_clear(self):
"""
Integration happy-path test that a model FileField can actually be set
and cleared via a ModelForm.
"""
class DocumentForm(forms.ModelForm):
class Meta:
model = Document
fields = '__all__'
form = DocumentForm()
self.assertIn('name="myfile"', six.text_type(form))
self.assertNotIn('myfile-clear', six.text_type(form))
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', b'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
self.assertEqual(doc.myfile.name, 'something.txt')
form = DocumentForm(instance=doc)
self.assertIn('myfile-clear', six.text_type(form))
form = DocumentForm(instance=doc, data={'myfile-clear': 'true'})
doc = form.save(commit=False)
self.assertEqual(bool(doc.myfile), False)
def test_clear_and_file_contradiction(self):
"""
If the user submits a new file upload AND checks the clear checkbox,
they get a validation error, and the bound redisplay of the form still
includes the current file and the clear checkbox.
"""
class DocumentForm(forms.ModelForm):
class Meta:
model = Document
fields = '__all__'
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', b'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
form = DocumentForm(instance=doc,
files={'myfile': SimpleUploadedFile('something.txt', b'content')},
data={'myfile-clear': 'true'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['myfile'],
['Please either submit a file or check the clear checkbox, not both.'])
rendered = six.text_type(form)
self.assertIn('something.txt', rendered)
self.assertIn('myfile-clear', rendered)
def test_file_field_data(self):
# Test conditions when files is either not given or empty.
f = TextFileForm(data={'description': 'Assistance'})
self.assertFalse(f.is_valid())
f = TextFileForm(data={'description': 'Assistance'}, files={})
self.assertFalse(f.is_valid())
# Upload a file and ensure it all works as expected.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
instance.file.delete()
# If the previous file has been deleted, the file name can be reused
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
# Check if the max_length attribute has been inherited from the model.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
self.assertFalse(f.is_valid())
# Edit an instance that already has the file defined in the model. This will not
# save the file again, but leave it exactly as it is.
f = TextFileForm(
data={'description': 'Assistance'},
instance=instance)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
# Override the file by uploading a new one.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
instance.delete()
def test_filefield_required_false(self):
# Test the non-required FileField
f = TextFileForm(data={'description': 'Assistance'})
f.fields['file'].required = False
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, '')
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
# The instance can be edited without re-uploading the file, and the existing file should be preserved.
f = TextFileForm(
data={'description': 'New Description'},
instance=instance)
f.fields['file'].required = False
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.description, 'New Description')
self.assertEqual(instance.file.name, 'tests/test3.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
instance.delete()
def test_custom_file_field_save(self):
"""
Regression for #11149: save_form_data should be called only once
"""
class CFFForm(forms.ModelForm):
class Meta:
model = CustomFF
fields = '__all__'
# It's enough that the form saves without error -- the custom save routine will
# generate an AssertionError if it is called more than once during save.
form = CFFForm(data={'f': None})
form.save()
def test_file_field_multiple_save(self):
"""
Simulate a file upload and check how many times Model.save() gets
called. Test for bug #639.
"""
class PhotoForm(forms.ModelForm):
class Meta:
model = Photo
fields = '__all__'
# Grab an image for testing.
filename = os.path.join(os.path.dirname(upath(__file__)), "test.png")
with open(filename, "rb") as fp:
img = fp.read()
# Fake a POST QueryDict and FILES MultiValueDict.
data = {'title': 'Testing'}
files = {"image": SimpleUploadedFile('test.png', img, 'image/png')}
form = PhotoForm(data=data, files=files)
p = form.save()
try:
# Check the savecount stored on the object (see the model).
self.assertEqual(p._savecount, 1)
finally:
# Delete the "uploaded" file to avoid clogging /tmp.
p = Photo.objects.get()
p.image.delete(save=False)
def test_file_path_field_blank(self):
"""
Regression test for #8842: FilePathField(blank=True)
"""
class FPForm(forms.ModelForm):
class Meta:
model = FilePathModel
fields = '__all__'
form = FPForm()
names = [p[1] for p in form['path'].field.choices]
names.sort()
self.assertEqual(names, ['---------', '__init__.py', 'models.py', 'tests.py'])
@skipUnless(test_images, "Pillow not installed")
def test_image_field(self):
# ImageField and FileField are nearly identical, but they differ slightly when
# it comes to validation. This specifically tests that #6302 is fixed for
# both file fields and image fields.
with open(os.path.join(os.path.dirname(upath(__file__)), "test.png"), 'rb') as fp:
image_data = fp.read()
with open(os.path.join(os.path.dirname(upath(__file__)), "test2.png"), 'rb') as fp:
image_data2 = fp.read()
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Edit an instance that already has the (required) image defined in the model. This will not
# save the image again, but leave it exactly as it is.
f = ImageFileForm(data={'description': 'Look, it changed'}, instance=instance)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['image'].name, 'tests/test.png')
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.height, 16)
self.assertEqual(instance.width, 16)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
# Override the file by uploading a new one.
f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
self.assertEqual(instance.height, 32)
self.assertEqual(instance.width, 48)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
instance.delete()
f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
self.assertEqual(instance.height, 32)
self.assertEqual(instance.width, 48)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
instance.delete()
# Test the non-required ImageField
# Note: In Oracle, we expect a null ImageField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_imagefield_repr = ''
else:
expected_null_imagefield_repr = None
f = OptionalImageFileForm(data={'description': 'Test'})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, expected_null_imagefield_repr)
self.assertEqual(instance.width, None)
self.assertEqual(instance.height, None)
f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test3.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Editing the instance without re-uploading the image should not affect the image or its width/height properties
f = OptionalImageFileForm(
data={'description': 'New Description'},
instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.description, 'New Description')
self.assertEqual(instance.image.name, 'tests/test3.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Delete the current file since this is not done by Django.
instance.image.delete()
instance.delete()
f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test4.png', image_data2)}
)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test4.png')
self.assertEqual(instance.width, 48)
self.assertEqual(instance.height, 32)
instance.delete()
# Test callable upload_to behavior that's dependent on the value of another field in the model
f = ImageFileForm(
data={'description': 'And a final one', 'path': 'foo'},
files={'image': SimpleUploadedFile('test4.png', image_data)})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'foo/test4.png')
instance.delete()
class ModelOtherFieldTests(TestCase):
def test_big_integer_field(self):
bif = BigIntForm({'biggie': '-9223372036854775808'})
self.assertTrue(bif.is_valid())
bif = BigIntForm({'biggie': '-9223372036854775809'})
self.assertFalse(bif.is_valid())
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is greater than or equal to -9223372036854775808.']})
bif = BigIntForm({'biggie': '9223372036854775807'})
self.assertTrue(bif.is_valid())
bif = BigIntForm({'biggie': '9223372036854775808'})
self.assertFalse(bif.is_valid())
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is less than or equal to 9223372036854775807.']})
def test_comma_separated_integer_field(self):
class CommaSeparatedIntegerForm(forms.ModelForm):
class Meta:
model = CommaSeparatedInteger
fields = '__all__'
f = CommaSeparatedIntegerForm({'field': '1,2,3'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1,2,3'})
f = CommaSeparatedIntegerForm({'field': '1a,2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': ',,,,'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': ',,,,'})
f = CommaSeparatedIntegerForm({'field': '1.2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': '1,a,2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': '1,,2'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1,,2'})
f = CommaSeparatedIntegerForm({'field': '1'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1'})
def test_url_on_modelform(self):
"Check basic URL field validation on model forms"
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
fields = '__all__'
self.assertFalse(HomepageForm({'url': 'foo'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example.'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://com.'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://localhost'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com/foo/bar'}).is_valid())
def test_http_prefixing(self):
"""
If the http:// prefix is omitted on form input, the field adds it again. (Refs #13613)
"""
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
fields = '__all__'
form = HomepageForm({'url': 'example.com'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['url'], 'http://example.com')
form = HomepageForm({'url': 'example.com/test'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['url'], 'http://example.com/test')
class OtherModelFormTests(TestCase):
def test_media_on_modelform(self):
# Similar to a regular Form class you can define custom media to be used on
# the ModelForm.
f = ModelFormWithMedia()
self.assertHTMLEqual(six.text_type(f.media), '''<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/form/javascript"></script>''')
def test_choices_type(self):
# Choices on CharField and IntegerField
f = ArticleForm()
with self.assertRaises(ValidationError):
f.fields['status'].clean('42')
f = ArticleStatusForm()
with self.assertRaises(ValidationError):
f.fields['status'].clean('z')
def test_foreignkeys_which_use_to_field(self):
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(barcode=87, name='Core', parent=apple)
field = forms.ModelChoiceField(Inventory.objects.all(), to_field_name='barcode')
self.assertEqual(tuple(field.choices), (
('', '---------'),
(86, 'Apple'),
(87, 'Core'),
(22, 'Pear')))
form = InventoryForm(instance=core)
self.assertHTMLEqual(six.text_type(form['parent']), '''<select name="parent" id="id_parent">
<option value="">---------</option>
<option value="86" selected="selected">Apple</option>
<option value="87">Core</option>
<option value="22">Pear</option>
</select>''')
data = model_to_dict(core)
data['parent'] = '22'
form = InventoryForm(data=data, instance=core)
core = form.save()
self.assertEqual(core.parent.name, 'Pear')
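# With to_field_name='barcode', both the rendered option values and the
# submitted data use the barcode field instead of the primary key.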
class CategoryForm(forms.ModelForm):
description = forms.CharField()
class Meta:
model = Category
fields = ['description', 'url']
self.assertEqual(list(CategoryForm.base_fields),
['description', 'url'])
self.assertHTMLEqual(six.text_type(CategoryForm()), '''<tr><th><label for="id_description">Description:</label></th><td><input type="text" name="description" id="id_description" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>''')
# to_field_name should also work on ModelMultipleChoiceField ##################
field = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
self.assertEqual(tuple(field.choices), ((86, 'Apple'), (87, 'Core'), (22, 'Pear')))
self.assertQuerysetEqual(field.clean([86]), ['Apple'])
form = SelectInventoryForm({'items': [87, 22]})
self.assertTrue(form.is_valid())
self.assertEqual(len(form.cleaned_data), 1)
self.assertQuerysetEqual(form.cleaned_data['items'], ['Core', 'Pear'])
def test_model_field_that_returns_none_to_exclude_itself_with_explicit_fields(self):
self.assertEqual(list(CustomFieldForExclusionForm.base_fields),
['name'])
self.assertHTMLEqual(six.text_type(CustomFieldForExclusionForm()),
'''<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="10" /></td></tr>''')
def test_iterable_model_m2m(self):
class ColourfulItemForm(forms.ModelForm):
class Meta:
model = ColourfulItem
fields = '__all__'
colour = Colour.objects.create(name='Blue')
form = ColourfulItemForm()
self.maxDiff = 1024
self.assertHTMLEqual(
form.as_p(),
"""<p><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="50" /></p>
<p><label for="id_colours">Colours:</label> <select multiple="multiple" name="colours" id="id_colours">
<option value="%(blue_pk)s">Blue</option>
</select></p>"""
% {'blue_pk': colour.pk})
def test_callable_field_default(self):
class PublicationDefaultsForm(forms.ModelForm):
class Meta:
model = PublicationDefaults
fields = '__all__'
self.maxDiff = 2000
form = PublicationDefaultsForm()
today_str = str(datetime.date.today())
self.assertHTMLEqual(
form.as_p(),
"""<p><label for="id_title">Title:</label> <input id="id_title" maxlength="30" name="title" type="text" /></p>
<p><label for="id_date_published">Date published:</label>
<input id="id_date_published" name="date_published" type="text" value="{0}" />
<input id="initial-id_date_published" name="initial-date_published" type="hidden" value="{0}" /></p>
<p><label for="id_mode">Mode:</label> <select id="id_mode" name="mode">
<option value="di" selected="selected">direct</option>
<option value="de">delayed</option></select>
<input id="initial-id_mode" name="initial-mode" type="hidden" value="di" /></p>
<p><label for="id_category">Category:</label> <select id="id_category" name="category">
<option value="1">Games</option>
<option value="2">Comics</option>
<option value="3" selected="selected">Novel</option></select>
<input id="initial-id_category" name="initial-category" type="hidden" value="3" />
""".format(today_str)
)
empty_data = {
'title': '',
'date_published': today_str,
'initial-date_published': today_str,
'mode': 'di',
'initial-mode': 'di',
'category': '3',
'initial-category': '3',
}
bound_form = PublicationDefaultsForm(empty_data)
self.assertFalse(bound_form.has_changed())
class ModelFormCustomErrorTests(TestCase):
def test_custom_error_messages(self):
data = {'name1': '@#$!!**@#$', 'name2': '@#$!!**@#$'}
errors = CustomErrorMessageForm(data).errors
self.assertHTMLEqual(
str(errors['name1']),
'<ul class="errorlist"><li>Form custom error message.</li></ul>'
)
self.assertHTMLEqual(
str(errors['name2']),
'<ul class="errorlist"><li>Model custom error message.</li></ul>'
)
def test_model_clean_error_messages(self):
data = {'name1': 'FORBIDDEN_VALUE', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertHTMLEqual(
str(form.errors['name1']),
'<ul class="errorlist"><li>Model.clean() error messages.</li></ul>'
)
data = {'name1': 'FORBIDDEN_VALUE2', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertHTMLEqual(
str(form.errors['name1']),
'<ul class="errorlist"><li>Model.clean() error messages (simpler syntax).</li></ul>'
)
data = {'name1': 'GLOBAL_ERROR', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['__all__'], ['Global error message.'])
class CustomCleanTests(TestCase):
def test_override_clean(self):
"""
Regression for #12596: Calling super from ModelForm.clean() should be
optional.
"""
class TripleFormWithCleanOverride(forms.ModelForm):
class Meta:
model = Triple
fields = '__all__'
def clean(self):
if not self.cleaned_data['left'] == self.cleaned_data['right']:
raise forms.ValidationError('Left and right should be equal')
return self.cleaned_data
form = TripleFormWithCleanOverride({'left': 1, 'middle': 2, 'right': 1})
self.assertTrue(form.is_valid())
# form.instance.left will be None if the instance was not constructed
# by form.full_clean().
self.assertEqual(form.instance.left, 1)
def test_model_form_clean_applies_to_model(self):
"""
Regression test for #12960. Make sure the cleaned_data returned from
ModelForm.clean() is applied to the model instance.
"""
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = '__all__'
def clean(self):
self.cleaned_data['name'] = self.cleaned_data['name'].upper()
return self.cleaned_data
data = {'name': 'Test', 'slug': 'test', 'url': '/test'}
form = CategoryForm(data)
category = form.save()
self.assertEqual(category.name, 'TEST')
class ModelFormInheritanceTests(TestCase):
def test_form_subclass_inheritance(self):
class Form(forms.Form):
age = forms.IntegerField()
class ModelForm(forms.ModelForm, Form):
class Meta:
model = Writer
fields = '__all__'
self.assertEqual(list(ModelForm().fields.keys()), ['name', 'age'])
def test_field_removal(self):
class ModelForm(forms.ModelForm):
class Meta:
model = Writer
fields = '__all__'
class Mixin(object):
age = None
class Form(forms.Form):
age = forms.IntegerField()
class Form2(forms.Form):
foo = forms.IntegerField()
self.assertEqual(list(ModelForm().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (Mixin, Form), {})().fields.keys()), [])
self.assertEqual(list(type(str('NewForm'), (Form2, Mixin, Form), {})().fields.keys()), ['foo'])
self.assertEqual(list(type(str('NewForm'), (Mixin, ModelForm, Form), {})().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Mixin, Form), {})().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Form, Mixin), {})().fields.keys()), ['name', 'age'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Form), {'age': None})().fields.keys()), ['name'])
def test_field_removal_name_clashes(self):
"""Regression test for https://code.djangoproject.com/ticket/22510."""
class MyForm(forms.ModelForm):
media = forms.CharField()
class Meta:
model = Writer
fields = '__all__'
class SubForm(MyForm):
media = None
self.assertIn('media', MyForm().fields)
self.assertNotIn('media', SubForm().fields)
self.assertTrue(hasattr(MyForm, 'media'))
self.assertTrue(hasattr(SubForm, 'media'))
class StumpJokeForm(forms.ModelForm):
class Meta:
model = StumpJoke
fields = '__all__'
class CustomFieldWithQuerysetButNoLimitChoicesTo(forms.Field):
queryset = 42
class StumpJokeWithCustomFieldForm(forms.ModelForm):
custom = CustomFieldWithQuerysetButNoLimitChoicesTo()
class Meta:
model = StumpJoke
fields = () # We don't need any fields from the model
class LimitChoicesToTest(TestCase):
"""
Tests the functionality of ``limit_choices_to``.
"""
def setUp(self):
self.threepwood = Character.objects.create(
username='threepwood',
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
self.marley = Character.objects.create(
username='marley',
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
def test_limit_choices_to_callable_for_fk_rel(self):
"""
A ForeignKey relation can use ``limit_choices_to`` as a callable, re #2554.
"""
stumpjokeform = StumpJokeForm()
self.assertIn(self.threepwood, stumpjokeform.fields['most_recently_fooled'].queryset)
self.assertNotIn(self.marley, stumpjokeform.fields['most_recently_fooled'].queryset)
def test_limit_choices_to_callable_for_m2m_rel(self):
"""
A ManyToMany relation can use ``limit_choices_to`` as a callable, re #2554.
"""
stumpjokeform = StumpJokeForm()
self.assertIn(self.threepwood, stumpjokeform.fields['has_fooled_today'].queryset)
self.assertNotIn(self.marley, stumpjokeform.fields['has_fooled_today'].queryset)
def test_custom_field_with_queryset_but_no_limit_choices_to(self):
"""
Regression test for #23795: Make sure a custom field with a `queryset`
attribute but no `limit_choices_to` still works.
"""
f = StumpJokeWithCustomFieldForm()
self.assertEqual(f.fields['custom'].queryset, 42)
class FormFieldCallbackTests(TestCase):
def test_baseform_with_widgets_in_meta(self):
"""Regression for #13095: Using base forms with widgets defined in Meta should not raise errors."""
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
fields = "__all__"
Form = modelform_factory(Person, form=BaseForm)
self.assertIs(Form.base_fields['name'].widget, widget)
def test_factory_with_widget_argument(self):
""" Regression for #15315: modelform_factory should accept widgets
argument
"""
widget = forms.Textarea()
# Without a widgets argument, the widget should not be set to Textarea
Form = modelform_factory(Person, fields="__all__")
self.assertNotEqual(Form.base_fields['name'].widget.__class__, forms.Textarea)
# With a widgets argument, the widget should be set to Textarea
Form = modelform_factory(Person, fields="__all__", widgets={'name': widget})
self.assertEqual(Form.base_fields['name'].widget.__class__, forms.Textarea)
def test_modelform_factory_without_fields(self):
""" Regression for #19733 """
message = (
"Calling modelform_factory without defining 'fields' or 'exclude' "
"explicitly is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
modelform_factory(Person)
def test_modelform_factory_with_all_fields(self):
""" Regression for #19733 """
form = modelform_factory(Person, fields="__all__")
self.assertEqual(list(form.base_fields), ["name"])
def test_custom_callback(self):
"""Test that a custom formfield_callback is used if provided"""
callback_args = []
def callback(db_field, **kwargs):
callback_args.append((db_field, kwargs))
return db_field.formfield(**kwargs)
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
fields = "__all__"
modelform_factory(Person, form=BaseForm, formfield_callback=callback)
id_field, name_field = Person._meta.fields
self.assertEqual(callback_args,
[(id_field, {}), (name_field, {'widget': widget})])
def test_bad_callback(self):
# A bad callback provided by the user should still raise an error
self.assertRaises(TypeError, modelform_factory, Person, fields="__all__",
formfield_callback='not a function or callable')
class LocalizedModelFormTest(TestCase):
def test_model_form_applies_localize_to_some_fields(self):
class PartiallyLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = ('left', 'right',)
fields = '__all__'
f = PartiallyLocalizedTripleForm({'left': 10, 'middle': 10, 'right': 10})
self.assertTrue(f.is_valid())
self.assertTrue(f.fields['left'].localize)
self.assertFalse(f.fields['middle'].localize)
self.assertTrue(f.fields['right'].localize)
def test_model_form_applies_localize_to_all_fields(self):
class FullyLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = '__all__'
fields = '__all__'
f = FullyLocalizedTripleForm({'left': 10, 'middle': 10, 'right': 10})
self.assertTrue(f.is_valid())
self.assertTrue(f.fields['left'].localize)
self.assertTrue(f.fields['middle'].localize)
self.assertTrue(f.fields['right'].localize)
def test_model_form_refuses_arbitrary_string(self):
with self.assertRaises(TypeError):
class BrokenLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = "foo"
class CustomMetaclass(ModelFormMetaclass):
def __new__(cls, name, bases, attrs):
new = super(CustomMetaclass, cls).__new__(cls, name, bases, attrs)
new.base_fields = {}
return new
class CustomMetaclassForm(six.with_metaclass(CustomMetaclass, forms.ModelForm)):
pass
class CustomMetaclassTestCase(TestCase):
def test_modelform_factory_metaclass(self):
new_cls = modelform_factory(Person, fields="__all__", form=CustomMetaclassForm)
self.assertEqual(new_cls.base_fields, {})
|
vaygr/ansible | refs/heads/devel | lib/ansible/module_utils/network/fortios/fortios.py | 89 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Benjamin Jolivot <[email protected]>, 2014
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import time
import traceback
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import env_fallback
# check for pyFG lib
try:
from pyFG import FortiOS, FortiConfig
from pyFG.exceptions import FailedCommit
HAS_PYFG = True
except ImportError:
HAS_PYFG = False
fortios_argument_spec = dict(
file_mode=dict(type='bool', default=False),
config_file=dict(type='path'),
host=dict(),
username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
password=dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
timeout=dict(type='int', default=60),
vdom=dict(type='str'),
backup=dict(type='bool', default=False),
backup_path=dict(type='path'),
backup_filename=dict(type='str'),
)
fortios_required_if = [
['file_mode', False, ['host', 'username', 'password']],
['file_mode', True, ['config_file']],
['backup', True, ['backup_path']],
]
fortios_mutually_exclusive = [
['config_file', 'host'],
['config_file', 'username'],
['config_file', 'password']
]
fortios_error_codes = {
'-3': "Object not found",
'-61': "Command error"
}
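# Illustrative sketch (not part of this module): a FortiOS module built on
# these helpers would typically merge the shared definitions into its own
# AnsibleModule; the `src` option below is a hypothetical module-specific one.
#
#     from ansible.module_utils.basic import AnsibleModule
#
#     argument_spec = dict(src=dict(type='path'))
#     argument_spec.update(fortios_argument_spec)
#     module = AnsibleModule(
#         argument_spec=argument_spec,
#         required_if=fortios_required_if,
#         mutually_exclusive=fortios_mutually_exclusive,
#         supports_check_mode=True,
#     )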
def backup(module, running_config):
backup_path = module.params['backup_path']
backup_filename = module.params['backup_filename']
if not os.path.exists(backup_path):
try:
os.mkdir(backup_path)
except OSError:
module.fail_json(msg="Can't create directory {0}: permission denied?".format(backup_path))
tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
if backup_filename:  # backup_filename defaults to None, so test truthiness
filename = '%s/%s' % (backup_path, backup_filename)
else:
filename = '%s/%s_config.%s' % (backup_path, module.params['host'], tstamp)
try:
with open(filename, 'w') as f:
f.write(running_config)
except IOError:
module.fail_json(msg="Can't create backup file {0}: permission denied?".format(filename))
class AnsibleFortios(object):
def __init__(self, module):
if not HAS_PYFG:
module.fail_json(msg='Could not import the python library pyFG required by this module')
self.result = {
'changed': False,
}
self.module = module
def _connect(self):
if self.module.params['file_mode']:
self.forti_device = FortiOS('')
else:
host = self.module.params['host']
username = self.module.params['username']
password = self.module.params['password']
timeout = self.module.params['timeout']
vdom = self.module.params['vdom']
self.forti_device = FortiOS(host, username=username, password=password, timeout=timeout, vdom=vdom)
try:
self.forti_device.open()
except Exception as e:
self.module.fail_json(msg='Error connecting device. %s' % to_native(e),
exception=traceback.format_exc())
def load_config(self, path):
self.path = path
self._connect()
# load in file_mode
if self.module.params['file_mode']:
try:
with open(self.module.params['config_file'], 'r') as f:
running = f.read()
except IOError as e:
self.module.fail_json(msg='Error reading configuration file. %s' % to_native(e),
exception=traceback.format_exc())
self.forti_device.load_config(config_text=running, path=path)
else:
# get config
try:
self.forti_device.load_config(path=path)
except Exception as e:
self.forti_device.close()
self.module.fail_json(msg='Error reading running config. %s' % to_native(e),
exception=traceback.format_exc())
# set configs in object
self.result['running_config'] = self.forti_device.running_config.to_text()
self.candidate_config = self.forti_device.candidate_config
# backup if needed
if self.module.params['backup']:
backup(self.module, self.forti_device.running_config.to_text())
def apply_changes(self):
change_string = self.forti_device.compare_config()
if change_string:
self.result['change_string'] = change_string
self.result['changed'] = True
# Commit if not check mode
if change_string and not self.module.check_mode:
if self.module.params['file_mode']:
try:
with open(self.module.params['config_file'], 'w') as f:
f.write(self.candidate_config.to_text())
except IOError as e:
self.module.fail_json(msg='Error writing configuration file. %s' %
to_native(e), exception=traceback.format_exc())
else:
try:
self.forti_device.commit()
except FailedCommit as e:
# Something's wrong (rollback is automatic)
self.forti_device.close()
error_list = self.get_error_infos(e)
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % to_native(e))
self.forti_device.close()
self.module.exit_json(**self.result)
def del_block(self, block_id):
self.forti_device.candidate_config[self.path].del_block(block_id)
def add_block(self, block_id, block):
self.forti_device.candidate_config[self.path][block_id] = block
def get_error_infos(self, cli_errors):
error_list = []
for errors in cli_errors.args:
for error in errors:
error_code = error[0]
error_string = error[1]
error_type = fortios_error_codes.get(error_code, "unknown")
error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))
return error_list
def get_empty_configuration_block(self, block_name, block_type):
return FortiConfig(block_name, block_type)
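# Illustrative usage sketch (the config path, block ids, and new_block are
# hypothetical): a module built on this helper drives the full lifecycle in
# this order:
#
#     fortigate = AnsibleFortios(module)
#     fortigate.load_config('firewall address')  # connect, load config, optional backup
#     fortigate.del_block('old_host')            # stage changes on the candidate config
#     fortigate.add_block('new_host', new_block)
#     fortigate.apply_changes()                  # diff, commit unless check mode, exit_json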
|
cpennington/edx-platform | refs/heads/master | openedx/core/djangoapps/content/block_structure/transformer_registry.py | 4 | """
Block Structure Transformer Registry implemented using the platform's
PluginManager.
"""
from base64 import b64encode
from hashlib import sha1
import six
from openedx.core.lib.cache_utils import process_cached
from openedx.core.lib.plugins import PluginManager
class TransformerRegistry(PluginManager):
"""
Registry for all of the block structure transformers that have been
made available.
All block structure transformers should implement
`BlockStructureTransformer`.
"""
NAMESPACE = 'openedx.block_structure_transformer'
USE_PLUGIN_MANAGER = True
@classmethod
def get_registered_transformers(cls):
"""
Returns a set of all registered transformers.
Returns:
{BlockStructureTransformer} - All transformers that are
registered with the platform's PluginManager.
"""
if cls.USE_PLUGIN_MANAGER:
return set(six.itervalues(cls.get_available_plugins()))
else:
return set()
@classmethod
@process_cached
def get_write_version_hash(cls):
"""
Returns a deterministic hash value of the WRITE_VERSION of all
registered transformers.
"""
hash_obj = sha1()
sorted_transformers = sorted(cls.get_registered_transformers(), key=lambda t: t.name())
for transformer in sorted_transformers:
hash_obj.update(six.b(transformer.name()))
hash_obj.update(six.b(str(transformer.WRITE_VERSION)))
return b64encode(hash_obj.digest()).decode('utf-8')
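# Because transformers are sorted by name before hashing, the value is
# order-independent; bumping any transformer's WRITE_VERSION yields a new
# hash, which callers can use to version previously written block data.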
@classmethod
def find_unregistered(cls, transformers):
"""
Find and returns the names of all the transformers from the
given list that aren't registered with the platform's
PluginManager.
Arguments:
transformers ([BlockStructureTransformer] - List of
transformers to check in the registry.
Returns:
set([string]) - Set of names of a subset of the given
transformers that weren't found in the registry.
"""
registered_transformer_names = set(reg_trans.name() for reg_trans in cls.get_registered_transformers())
requested_transformer_names = set(transformer.name() for transformer in transformers)
return requested_transformer_names - registered_transformer_names
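# Illustrative usage sketch (the surrounding caller and exception type are
# hypothetical): a collector would typically validate requested transformers
# up front:
#
#     unregistered = TransformerRegistry.find_unregistered(transformers)
#     if unregistered:
#         raise ValueError(
#             "Requested transformers are not registered: {}".format(unregistered)
#         )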
|