repo_name stringlengths 5-100 | ref stringlengths 12-67 | path stringlengths 4-244 | copies stringlengths 1-8 | content stringlengths 0-1.05M ⌀
---|---|---|---|---
guozhongxin/shadowsocks | refs/heads/master | tests/graceful_cli.py | 977 |
#!/usr/bin/python
import socks
import time
SERVER_IP = '127.0.0.1'
SERVER_PORT = 8001
if __name__ == '__main__':
    s = socks.socksocket()
    s.set_proxy(socks.SOCKS5, SERVER_IP, 1081)
    s.connect((SERVER_IP, SERVER_PORT))
    s.send(b'test')
    time.sleep(30)
    s.close()
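
# Presumably the 30-second sleep keeps the proxied connection open so the
# server's graceful restart/stop handling can be observed with a live client.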
|
ferriman/SSandSP | refs/heads/master | pyxel-test/venv/lib/python3.8/site-packages/pip/_vendor/toml/tz.py | 18 |
from datetime import tzinfo, timedelta
class TomlTz(tzinfo):
    def __init__(self, toml_offset):
        if toml_offset == "Z":
            self._raw_offset = "+00:00"
        else:
            self._raw_offset = toml_offset
        self._sign = -1 if self._raw_offset[0] == '-' else 1
        self._hours = int(self._raw_offset[1:3])
        self._minutes = int(self._raw_offset[4:6])

    def __deepcopy__(self, memo):
        return self.__class__(self._raw_offset)

    def tzname(self, dt):
        return "UTC" + self._raw_offset

    def utcoffset(self, dt):
        return self._sign * timedelta(hours=self._hours, minutes=self._minutes)

    def dst(self, dt):
        return timedelta(0)
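
# A minimal usage sketch (an illustrative assumption, not part of the vendored
# module): TomlTz turns a TOML offset such as "Z" or "+05:30" into a tzinfo.
if __name__ == "__main__":
    from datetime import datetime
    dt = datetime(2020, 1, 1, tzinfo=TomlTz("+05:30"))
    print(dt.utcoffset())            # 5:30:00
    print(TomlTz("Z").tzname(None))  # UTC+00:00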
|
ant9000/RIOT | refs/heads/master | tests/pkg_micro-ecc/tests/01-run.py | 21 |
#!/usr/bin/env python3
import sys
from testrunner import run
# Use a custom global timeout for slow hardware. On ATmegas clocked at 8MHz
# one test round completes in ~36s
TIMEOUT = 100
def testfunc(child):
    child.expect_exact('micro-ecc compiled!')
    child.expect(r'Testing (\d+) random private key pairs and signature '
                 'without using HWRNG')
    testrounds = int(child.match.group(1))
    for i in range(testrounds):
        child.expect_exact("Round {}".format(i))
    child.expect_exact('Done with 0 error(s)')
    child.expect_exact('SUCCESS')


if __name__ == "__main__":
    sys.exit(run(testfunc, timeout=TIMEOUT))
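
# Note: TIMEOUT is handed to testrunner's run(), where it presumably bounds
# the expect() calls above; the generous 100s covers the ~36s-per-round
# ATmega case mentioned in the comment.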
|
pizzathief/numpy | refs/heads/master | numpy/f2py/cfuncs.py | 3 |
#!/usr/bin/env python
"""
C declarations, CPP macros, and C functions for f2py2e.
Only required declarations/macros/functions will be used.
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 11:42:34 $
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
import sys
import copy
from . import __version__
f2py_version = __version__.version
errmess = sys.stderr.write
##################### Definitions ##################
outneeds = {'includes0': [], 'includes': [], 'typedefs': [], 'typedefs_generated': [],
            'userincludes': [],
            'cppmacros': [], 'cfuncs': [], 'callbacks': [], 'f90modhooks': [],
            'commonhooks': []}
needs = {}
includes0 = {'includes0': '/*need_includes0*/'}
includes = {'includes': '/*need_includes*/'}
userincludes = {'userincludes': '/*need_userincludes*/'}
typedefs = {'typedefs': '/*need_typedefs*/'}
typedefs_generated = {'typedefs_generated': '/*need_typedefs_generated*/'}
cppmacros = {'cppmacros': '/*need_cppmacros*/'}
cfuncs = {'cfuncs': '/*need_cfuncs*/'}
callbacks = {'callbacks': '/*need_callbacks*/'}
f90modhooks = {'f90modhooks': '/*need_f90modhooks*/',
               'initf90modhooksstatic': '/*initf90modhooksstatic*/',
               'initf90modhooksdynamic': '/*initf90modhooksdynamic*/',
               }
commonhooks = {'commonhooks': '/*need_commonhooks*/',
               'initcommonhooks': '/*need_initcommonhooks*/',
               }
############ Includes ###################
includes0['math.h'] = '#include <math.h>'
includes0['string.h'] = '#include <string.h>'
includes0['setjmp.h'] = '#include <setjmp.h>'
includes['Python.h'] = '#include "Python.h"'
needs['arrayobject.h'] = ['Python.h']
includes['arrayobject.h'] = '''#define PY_ARRAY_UNIQUE_SYMBOL PyArray_API
#include "arrayobject.h"'''
includes['fortranobject.h'] = '#include "fortranobject.h"'
includes['stdarg.h'] = '#include <stdarg.h>'
############# Type definitions ###############
typedefs['unsigned_char'] = 'typedef unsigned char unsigned_char;'
typedefs['unsigned_short'] = 'typedef unsigned short unsigned_short;'
typedefs['unsigned_long'] = 'typedef unsigned long unsigned_long;'
typedefs['signed_char'] = 'typedef signed char signed_char;'
typedefs['long_long'] = """\
#ifdef _WIN32
typedef __int64 long_long;
#else
typedef long long long_long;
typedef unsigned long long unsigned_long_long;
#endif
"""
typedefs['unsigned_long_long'] = """\
#ifdef _WIN32
typedef unsigned __int64 unsigned_long_long;
#else
typedef unsigned long long unsigned_long_long;
#endif
"""
typedefs['long_double'] = """\
#ifndef _LONG_DOUBLE
typedef long double long_double;
#endif
"""
typedefs['complex_long_double'] = 'typedef struct {long double r,i;} complex_long_double;'
typedefs['complex_float'] = 'typedef struct {float r,i;} complex_float;'
typedefs['complex_double'] = 'typedef struct {double r,i;} complex_double;'
typedefs['string'] = """typedef char * string;"""
############### CPP macros ####################
cppmacros['CFUNCSMESS'] = """\
#ifdef DEBUGCFUNCS
#define CFUNCSMESS(mess) fprintf(stderr,\"debug-capi:\"mess);
#define CFUNCSMESSPY(mess,obj) CFUNCSMESS(mess) \\
PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
fprintf(stderr,\"\\n\");
#else
#define CFUNCSMESS(mess)
#define CFUNCSMESSPY(mess,obj)
#endif
"""
cppmacros['F_FUNC'] = """\
#if defined(PREPEND_FORTRAN)
#if defined(NO_APPEND_FORTRAN)
#if defined(UPPERCASE_FORTRAN)
#define F_FUNC(f,F) _##F
#else
#define F_FUNC(f,F) _##f
#endif
#else
#if defined(UPPERCASE_FORTRAN)
#define F_FUNC(f,F) _##F##_
#else
#define F_FUNC(f,F) _##f##_
#endif
#endif
#else
#if defined(NO_APPEND_FORTRAN)
#if defined(UPPERCASE_FORTRAN)
#define F_FUNC(f,F) F
#else
#define F_FUNC(f,F) f
#endif
#else
#if defined(UPPERCASE_FORTRAN)
#define F_FUNC(f,F) F##_
#else
#define F_FUNC(f,F) f##_
#endif
#endif
#endif
#if defined(UNDERSCORE_G77)
#define F_FUNC_US(f,F) F_FUNC(f##_,F##_)
#else
#define F_FUNC_US(f,F) F_FUNC(f,F)
#endif
"""
cppmacros['F_WRAPPEDFUNC'] = """\
#if defined(PREPEND_FORTRAN)
#if defined(NO_APPEND_FORTRAN)
#if defined(UPPERCASE_FORTRAN)
#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F
#else
#define F_WRAPPEDFUNC(f,F) _f2pywrap##f
#endif
#else
#if defined(UPPERCASE_FORTRAN)
#define F_WRAPPEDFUNC(f,F) _F2PYWRAP##F##_
#else
#define F_WRAPPEDFUNC(f,F) _f2pywrap##f##_
#endif
#endif
#else
#if defined(NO_APPEND_FORTRAN)
#if defined(UPPERCASE_FORTRAN)
#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F
#else
#define F_WRAPPEDFUNC(f,F) f2pywrap##f
#endif
#else
#if defined(UPPERCASE_FORTRAN)
#define F_WRAPPEDFUNC(f,F) F2PYWRAP##F##_
#else
#define F_WRAPPEDFUNC(f,F) f2pywrap##f##_
#endif
#endif
#endif
#if defined(UNDERSCORE_G77)
#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f##_,F##_)
#else
#define F_WRAPPEDFUNC_US(f,F) F_WRAPPEDFUNC(f,F)
#endif
"""
cppmacros['F_MODFUNC'] = """\
#if defined(F90MOD2CCONV1) /*E.g. Compaq Fortran */
#if defined(NO_APPEND_FORTRAN)
#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f
#else
#define F_MODFUNCNAME(m,f) $ ## m ## $ ## f ## _
#endif
#endif
#if defined(F90MOD2CCONV2) /*E.g. IBM XL Fortran, not tested though */
#if defined(NO_APPEND_FORTRAN)
#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f
#else
#define F_MODFUNCNAME(m,f) __ ## m ## _MOD_ ## f ## _
#endif
#endif
#if defined(F90MOD2CCONV3) /*E.g. MIPSPro Compilers */
#if defined(NO_APPEND_FORTRAN)
#define F_MODFUNCNAME(m,f) f ## .in. ## m
#else
#define F_MODFUNCNAME(m,f) f ## .in. ## m ## _
#endif
#endif
/*
#if defined(UPPERCASE_FORTRAN)
#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(M,F)
#else
#define F_MODFUNC(m,M,f,F) F_MODFUNCNAME(m,f)
#endif
*/
#define F_MODFUNC(m,f) (*(f2pymodstruct##m##.##f))
"""
cppmacros['SWAPUNSAFE'] = """\
#define SWAP(a,b) (size_t)(a) = ((size_t)(a) ^ (size_t)(b));\\
(size_t)(b) = ((size_t)(a) ^ (size_t)(b));\\
(size_t)(a) = ((size_t)(a) ^ (size_t)(b))
"""
cppmacros['SWAP'] = """\
#define SWAP(a,b,t) {\\
t *c;\\
c = a;\\
a = b;\\
b = c;}
"""
# cppmacros['ISCONTIGUOUS']='#define ISCONTIGUOUS(m) (PyArray_FLAGS(m) &
# NPY_ARRAY_C_CONTIGUOUS)'
cppmacros['PRINTPYOBJERR'] = """\
#define PRINTPYOBJERR(obj)\\
fprintf(stderr,\"#modulename#.error is related to \");\\
PyObject_Print((PyObject *)obj,stderr,Py_PRINT_RAW);\\
fprintf(stderr,\"\\n\");
"""
cppmacros['MINMAX'] = """\
#ifndef max
#define max(a,b) ((a > b) ? (a) : (b))
#endif
#ifndef min
#define min(a,b) ((a < b) ? (a) : (b))
#endif
#ifndef MAX
#define MAX(a,b) ((a > b) ? (a) : (b))
#endif
#ifndef MIN
#define MIN(a,b) ((a < b) ? (a) : (b))
#endif
"""
needs['len..'] = ['f2py_size']
cppmacros['len..'] = """\
#define rank(var) var ## _Rank
#define shape(var,dim) var ## _Dims[dim]
#define old_rank(var) (PyArray_NDIM((PyArrayObject *)(capi_ ## var ## _tmp)))
#define old_shape(var,dim) PyArray_DIM(((PyArrayObject *)(capi_ ## var ## _tmp)),dim)
#define fshape(var,dim) shape(var,rank(var)-dim-1)
#define len(var) shape(var,0)
#define flen(var) fshape(var,0)
#define old_size(var) PyArray_SIZE((PyArrayObject *)(capi_ ## var ## _tmp))
/* #define index(i) capi_i ## i */
#define slen(var) capi_ ## var ## _len
#define size(var, ...) f2py_size((PyArrayObject *)(capi_ ## var ## _tmp), ## __VA_ARGS__, -1)
"""
needs['f2py_size'] = ['stdarg.h']
cfuncs['f2py_size'] = """\
static int f2py_size(PyArrayObject* var, ...)
{
    npy_int sz = 0;
    npy_int dim;
    npy_int rank;
    va_list argp;
    va_start(argp, var);
    dim = va_arg(argp, npy_int);
    if (dim==-1)
    {
        sz = PyArray_SIZE(var);
    }
    else
    {
        rank = PyArray_NDIM(var);
        if (dim>=1 && dim<=rank)
            sz = PyArray_DIM(var, dim-1);
        else
            fprintf(stderr, \"f2py_size: 2nd argument value=%d fails to satisfy 1<=value<=%d. Result will be 0.\\n\", dim, rank);
    }
    va_end(argp);
    return sz;
}
"""
cppmacros['pyobj_from_char1'] = '#define pyobj_from_char1(v) (PyInt_FromLong(v))'
cppmacros['pyobj_from_short1'] = '#define pyobj_from_short1(v) (PyInt_FromLong(v))'
needs['pyobj_from_int1'] = ['signed_char']
cppmacros['pyobj_from_int1'] = '#define pyobj_from_int1(v) (PyInt_FromLong(v))'
cppmacros['pyobj_from_long1'] = '#define pyobj_from_long1(v) (PyLong_FromLong(v))'
needs['pyobj_from_long_long1'] = ['long_long']
cppmacros['pyobj_from_long_long1'] = """\
#ifdef HAVE_LONG_LONG
#define pyobj_from_long_long1(v) (PyLong_FromLongLong(v))
#else
#warning HAVE_LONG_LONG is not available. Redefining pyobj_from_long_long.
#define pyobj_from_long_long1(v) (PyLong_FromLong(v))
#endif
"""
needs['pyobj_from_long_double1'] = ['long_double']
cppmacros['pyobj_from_long_double1'] = '#define pyobj_from_long_double1(v) (PyFloat_FromDouble(v))'
cppmacros['pyobj_from_double1'] = '#define pyobj_from_double1(v) (PyFloat_FromDouble(v))'
cppmacros['pyobj_from_float1'] = '#define pyobj_from_float1(v) (PyFloat_FromDouble(v))'
needs['pyobj_from_complex_long_double1'] = ['complex_long_double']
cppmacros['pyobj_from_complex_long_double1'] = '#define pyobj_from_complex_long_double1(v) (PyComplex_FromDoubles(v.r,v.i))'
needs['pyobj_from_complex_double1'] = ['complex_double']
cppmacros['pyobj_from_complex_double1'] = '#define pyobj_from_complex_double1(v) (PyComplex_FromDoubles(v.r,v.i))'
needs['pyobj_from_complex_float1'] = ['complex_float']
cppmacros['pyobj_from_complex_float1'] = '#define pyobj_from_complex_float1(v) (PyComplex_FromDoubles(v.r,v.i))'
needs['pyobj_from_string1'] = ['string']
cppmacros['pyobj_from_string1'] = '#define pyobj_from_string1(v) (PyString_FromString((char *)v))'
needs['pyobj_from_string1size'] = ['string']
cppmacros['pyobj_from_string1size'] = '#define pyobj_from_string1size(v,len) (PyUString_FromStringAndSize((char *)v, len))'
needs['TRYPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
cppmacros['TRYPYARRAYTEMPLATE'] = """\
/* New SciPy */
#define TRYPYARRAYTEMPLATECHAR case NPY_STRING: *(char *)(PyArray_DATA(arr))=*v; break;
#define TRYPYARRAYTEMPLATELONG case NPY_LONG: *(long *)(PyArray_DATA(arr))=*v; break;
#define TRYPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr,PyArray_DATA(arr),pyobj_from_ ## ctype ## 1(*v)); break;
#define TRYPYARRAYTEMPLATE(ctype,typecode) \\
PyArrayObject *arr = NULL;\\
if (!obj) return -2;\\
if (!PyArray_Check(obj)) return -1;\\
if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
if (PyArray_DESCR(arr)->type==typecode) {*(ctype *)(PyArray_DATA(arr))=*v; return 1;}\\
switch (PyArray_TYPE(arr)) {\\
case NPY_DOUBLE: *(double *)(PyArray_DATA(arr))=*v; break;\\
case NPY_INT: *(int *)(PyArray_DATA(arr))=*v; break;\\
case NPY_LONG: *(long *)(PyArray_DATA(arr))=*v; break;\\
case NPY_FLOAT: *(float *)(PyArray_DATA(arr))=*v; break;\\
case NPY_CDOUBLE: *(double *)(PyArray_DATA(arr))=*v; break;\\
case NPY_CFLOAT: *(float *)(PyArray_DATA(arr))=*v; break;\\
case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=(*v!=0); break;\\
case NPY_UBYTE: *(unsigned char *)(PyArray_DATA(arr))=*v; break;\\
case NPY_BYTE: *(signed char *)(PyArray_DATA(arr))=*v; break;\\
case NPY_SHORT: *(short *)(PyArray_DATA(arr))=*v; break;\\
case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=*v; break;\\
case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=*v; break;\\
case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=*v; break;\\
case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=*v; break;\\
case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=*v; break;\\
case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=*v; break;\\
case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_ ## ctype ## 1(*v)); break;\\
default: return -2;\\
};\\
return 1
"""
needs['TRYCOMPLEXPYARRAYTEMPLATE'] = ['PRINTPYOBJERR']
cppmacros['TRYCOMPLEXPYARRAYTEMPLATE'] = """\
#define TRYCOMPLEXPYARRAYTEMPLATEOBJECT case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;
#define TRYCOMPLEXPYARRAYTEMPLATE(ctype,typecode)\\
PyArrayObject *arr = NULL;\\
if (!obj) return -2;\\
if (!PyArray_Check(obj)) return -1;\\
if (!(arr=(PyArrayObject *)obj)) {fprintf(stderr,\"TRYCOMPLEXPYARRAYTEMPLATE:\");PRINTPYOBJERR(obj);return 0;}\\
if (PyArray_DESCR(arr)->type==typecode) {\\
*(ctype *)(PyArray_DATA(arr))=(*v).r;\\
*(ctype *)(PyArray_DATA(arr)+sizeof(ctype))=(*v).i;\\
return 1;\\
}\\
switch (PyArray_TYPE(arr)) {\\
case NPY_CDOUBLE: *(double *)(PyArray_DATA(arr))=(*v).r;*(double *)(PyArray_DATA(arr)+sizeof(double))=(*v).i;break;\\
case NPY_CFLOAT: *(float *)(PyArray_DATA(arr))=(*v).r;*(float *)(PyArray_DATA(arr)+sizeof(float))=(*v).i;break;\\
case NPY_DOUBLE: *(double *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_LONG: *(long *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_FLOAT: *(float *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_INT: *(int *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_SHORT: *(short *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_UBYTE: *(unsigned char *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_BYTE: *(signed char *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_BOOL: *(npy_bool *)(PyArray_DATA(arr))=((*v).r!=0 && (*v).i!=0); break;\\
case NPY_USHORT: *(npy_ushort *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_UINT: *(npy_uint *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_ULONG: *(npy_ulong *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_LONGLONG: *(npy_longlong *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_ULONGLONG: *(npy_ulonglong *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_LONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r; break;\\
case NPY_CLONGDOUBLE: *(npy_longdouble *)(PyArray_DATA(arr))=(*v).r;*(npy_longdouble *)(PyArray_DATA(arr)+sizeof(npy_longdouble))=(*v).i;break;\\
case NPY_OBJECT: PyArray_SETITEM(arr, PyArray_DATA(arr), pyobj_from_complex_ ## ctype ## 1((*v))); break;\\
default: return -2;\\
};\\
return -1;
"""
# cppmacros['NUMFROMARROBJ']="""\
# define NUMFROMARROBJ(typenum,ctype) \\
# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
# if (arr) {\\
# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
# goto capi_fail;\\
# } else {\\
# (PyArray_DESCR(arr)->cast[typenum])(PyArray_DATA(arr),1,(char*)v,1,1);\\
# }\\
# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
# return 1;\\
# }
# """
# XXX: Note that CNUMFROMARROBJ is identical with NUMFROMARROBJ
# cppmacros['CNUMFROMARROBJ']="""\
# define CNUMFROMARROBJ(typenum,ctype) \\
# if (PyArray_Check(obj)) arr = (PyArrayObject *)obj;\\
# else arr = (PyArrayObject *)PyArray_ContiguousFromObject(obj,typenum,0,0);\\
# if (arr) {\\
# if (PyArray_TYPE(arr)==NPY_OBJECT) {\\
# if (!ctype ## _from_pyobj(v,(PyArray_DESCR(arr)->getitem)(PyArray_DATA(arr)),\"\"))\\
# goto capi_fail;\\
# } else {\\
# (PyArray_DESCR(arr)->cast[typenum])((void *)(PyArray_DATA(arr)),1,(void *)(v),1,1);\\
# }\\
# if ((PyObject *)arr != obj) { Py_DECREF(arr); }\\
# return 1;\\
# }
# """
needs['GETSTRFROMPYTUPLE'] = ['STRINGCOPYN', 'PRINTPYOBJERR']
cppmacros['GETSTRFROMPYTUPLE'] = """\
#define GETSTRFROMPYTUPLE(tuple,index,str,len) {\\
PyObject *rv_cb_str = PyTuple_GetItem((tuple),(index));\\
if (rv_cb_str == NULL)\\
goto capi_fail;\\
if (PyString_Check(rv_cb_str)) {\\
str[len-1]='\\0';\\
STRINGCOPYN((str),PyString_AS_STRING((PyStringObject*)rv_cb_str),(len));\\
} else {\\
PRINTPYOBJERR(rv_cb_str);\\
PyErr_SetString(#modulename#_error,\"string object expected\");\\
goto capi_fail;\\
}\\
}
"""
cppmacros['GETSCALARFROMPYTUPLE'] = """\
#define GETSCALARFROMPYTUPLE(tuple,index,var,ctype,mess) {\\
if ((capi_tmp = PyTuple_GetItem((tuple),(index)))==NULL) goto capi_fail;\\
if (!(ctype ## _from_pyobj((var),capi_tmp,mess)))\\
goto capi_fail;\\
}
"""
cppmacros['FAILNULL'] = """\\
#define FAILNULL(p) do { \\
if ((p) == NULL) { \\
PyErr_SetString(PyExc_MemoryError, "NULL pointer found"); \\
goto capi_fail; \\
} \\
} while (0)
"""
needs['MEMCOPY'] = ['string.h', 'FAILNULL']
cppmacros['MEMCOPY'] = """\
#define MEMCOPY(to,from,n)\\
do { FAILNULL(to); FAILNULL(from); (void)memcpy(to,from,n); } while (0)
"""
cppmacros['STRINGMALLOC'] = """\
#define STRINGMALLOC(str,len)\\
if ((str = (string)malloc(sizeof(char)*(len+1))) == NULL) {\\
PyErr_SetString(PyExc_MemoryError, \"out of memory\");\\
goto capi_fail;\\
} else {\\
(str)[len] = '\\0';\\
}
"""
cppmacros['STRINGFREE'] = """\
#define STRINGFREE(str) do {if (!(str == NULL)) free(str);} while (0)
"""
needs['STRINGCOPYN'] = ['string.h', 'FAILNULL']
cppmacros['STRINGCOPYN'] = """\
#define STRINGCOPYN(to,from,buf_size) \\
do { \\
int _m = (buf_size); \\
char *_to = (to); \\
char *_from = (from); \\
FAILNULL(_to); FAILNULL(_from); \\
(void)strncpy(_to, _from, sizeof(char)*_m); \\
_to[_m-1] = '\\0'; \\
/* Padding with spaces instead of nulls */ \\
for (_m -= 2; _m >= 0 && _to[_m] == '\\0'; _m--) { \\
_to[_m] = ' '; \\
} \\
} while (0)
"""
needs['STRINGCOPY'] = ['string.h', 'FAILNULL']
cppmacros['STRINGCOPY'] = """\
#define STRINGCOPY(to,from)\\
do { FAILNULL(to); FAILNULL(from); (void)strcpy(to,from); } while (0)
"""
cppmacros['CHECKGENERIC'] = """\
#define CHECKGENERIC(check,tcheck,name) \\
if (!(check)) {\\
PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
/*goto capi_fail;*/\\
} else """
cppmacros['CHECKARRAY'] = """\
#define CHECKARRAY(check,tcheck,name) \\
if (!(check)) {\\
PyErr_SetString(#modulename#_error,\"(\"tcheck\") failed for \"name);\\
/*goto capi_fail;*/\\
} else """
cppmacros['CHECKSTRING'] = """\
#define CHECKSTRING(check,tcheck,name,show,var)\\
if (!(check)) {\\
char errstring[256];\\
sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, slen(var), var);\\
PyErr_SetString(#modulename#_error, errstring);\\
/*goto capi_fail;*/\\
} else """
cppmacros['CHECKSCALAR'] = """\
#define CHECKSCALAR(check,tcheck,name,show,var)\\
if (!(check)) {\\
char errstring[256];\\
sprintf(errstring, \"%s: \"show, \"(\"tcheck\") failed for \"name, var);\\
PyErr_SetString(#modulename#_error,errstring);\\
/*goto capi_fail;*/\\
} else """
# cppmacros['CHECKDIMS']="""\
# define CHECKDIMS(dims,rank) \\
# for (int i=0;i<(rank);i++)\\
# if (dims[i]<0) {\\
# fprintf(stderr,\"Unspecified array argument requires a complete dimension specification.\\n\");\\
# goto capi_fail;\\
# }
# """
cppmacros['ARRSIZE'] = '#define ARRSIZE(dims,rank) (_PyArray_multiply_list(dims,rank))'
cppmacros['OLDPYNUM'] = """\
#ifdef OLDPYNUM
#error You need to install NumPy version 13 or higher. See https://scipy.org/install.html
#endif
"""
################# C functions ###############
cfuncs['calcarrindex'] = """\
static int calcarrindex(int *i,PyArrayObject *arr) {
    int k,ii = i[0];
    for (k=1; k < PyArray_NDIM(arr); k++)
        ii += (ii*(PyArray_DIM(arr,k) - 1)+i[k]); /* assuming contiguous arr */
    return ii;
}"""
cfuncs['calcarrindextr'] = """\
static int calcarrindextr(int *i,PyArrayObject *arr) {
    int k,ii = i[PyArray_NDIM(arr)-1];
    for (k=1; k < PyArray_NDIM(arr); k++)
        ii += (ii*(PyArray_DIM(arr,PyArray_NDIM(arr)-k-1) - 1)+i[PyArray_NDIM(arr)-k-1]); /* assuming contiguous arr */
    return ii;
}"""
cfuncs['forcomb'] = """\
static struct { int nd;npy_intp *d;int *i,*i_tr,tr; } forcombcache;
static int initforcomb(npy_intp *dims,int nd,int tr) {
    int k;
    if (dims==NULL) return 0;
    if (nd<0) return 0;
    forcombcache.nd = nd;
    forcombcache.d = dims;
    forcombcache.tr = tr;
    if ((forcombcache.i = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
    if ((forcombcache.i_tr = (int *)malloc(sizeof(int)*nd))==NULL) return 0;
    for (k=1;k<nd;k++) {
        forcombcache.i[k] = forcombcache.i_tr[nd-k-1] = 0;
    }
    forcombcache.i[0] = forcombcache.i_tr[nd-1] = -1;
    return 1;
}
static int *nextforcomb(void) {
    int j,*i,*i_tr,k;
    int nd=forcombcache.nd;
    if ((i=forcombcache.i) == NULL) return NULL;
    if ((i_tr=forcombcache.i_tr) == NULL) return NULL;
    if (forcombcache.d == NULL) return NULL;
    i[0]++;
    if (i[0]==forcombcache.d[0]) {
        j=1;
        while ((j<nd) && (i[j]==forcombcache.d[j]-1)) j++;
        if (j==nd) {
            free(i);
            free(i_tr);
            return NULL;
        }
        for (k=0;k<j;k++) i[k] = i_tr[nd-k-1] = 0;
        i[j]++;
        i_tr[nd-j-1]++;
    } else
        i_tr[nd-1]++;
    if (forcombcache.tr) return i_tr;
    return i;
}"""
needs['try_pyarr_from_string'] = ['STRINGCOPYN', 'PRINTPYOBJERR', 'string']
cfuncs['try_pyarr_from_string'] = """\
static int try_pyarr_from_string(PyObject *obj,const string str) {
    PyArrayObject *arr = NULL;
    if (PyArray_Check(obj) && (!((arr = (PyArrayObject *)obj) == NULL)))
        { STRINGCOPYN(PyArray_DATA(arr),str,PyArray_NBYTES(arr)); }
    return 1;
capi_fail:
    PRINTPYOBJERR(obj);
    PyErr_SetString(#modulename#_error,\"try_pyarr_from_string failed\");
    return 0;
}
"""
needs['string_from_pyobj'] = ['string', 'STRINGMALLOC', 'STRINGCOPYN']
cfuncs['string_from_pyobj'] = """\
static int string_from_pyobj(string *str,int *len,const string inistr,PyObject *obj,const char *errmess) {
    PyArrayObject *arr = NULL;
    PyObject *tmp = NULL;
#ifdef DEBUGCFUNCS
    fprintf(stderr,\"string_from_pyobj(str='%s',len=%d,inistr='%s',obj=%p)\\n\",(char*)str,*len,(char *)inistr,obj);
#endif
    if (obj == Py_None) {
        if (*len == -1)
            *len = strlen(inistr); /* Will this cause problems? */
        STRINGMALLOC(*str,*len);
        STRINGCOPYN(*str,inistr,*len+1);
        return 1;
    }
    if (PyArray_Check(obj)) {
        if ((arr = (PyArrayObject *)obj) == NULL)
            goto capi_fail;
        if (!ISCONTIGUOUS(arr)) {
            PyErr_SetString(PyExc_ValueError,\"array object is non-contiguous.\");
            goto capi_fail;
        }
        if (*len == -1)
            *len = (PyArray_ITEMSIZE(arr))*PyArray_SIZE(arr);
        STRINGMALLOC(*str,*len);
        STRINGCOPYN(*str,PyArray_DATA(arr),*len+1);
        return 1;
    }
    if (PyString_Check(obj)) {
        tmp = obj;
        Py_INCREF(tmp);
    }
#if PY_VERSION_HEX >= 0x03000000
    else if (PyUnicode_Check(obj)) {
        tmp = PyUnicode_AsASCIIString(obj);
    }
    else {
        PyObject *tmp2;
        tmp2 = PyObject_Str(obj);
        if (tmp2) {
            tmp = PyUnicode_AsASCIIString(tmp2);
            Py_DECREF(tmp2);
        }
        else {
            tmp = NULL;
        }
    }
#else
    else {
        tmp = PyObject_Str(obj);
    }
#endif
    if (tmp == NULL) goto capi_fail;
    if (*len == -1)
        *len = PyString_GET_SIZE(tmp);
    STRINGMALLOC(*str,*len);
    STRINGCOPYN(*str,PyString_AS_STRING(tmp),*len+1);
    Py_DECREF(tmp);
    return 1;
capi_fail:
    Py_XDECREF(tmp);
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL) err = #modulename#_error;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
needs['char_from_pyobj'] = ['int_from_pyobj']
cfuncs['char_from_pyobj'] = """\
static int char_from_pyobj(char* v,PyObject *obj,const char *errmess) {
    int i=0;
    if (int_from_pyobj(&i,obj,errmess)) {
        *v = (char)i;
        return 1;
    }
    return 0;
}
"""
needs['signed_char_from_pyobj'] = ['int_from_pyobj', 'signed_char']
cfuncs['signed_char_from_pyobj'] = """\
static int signed_char_from_pyobj(signed_char* v,PyObject *obj,const char *errmess) {
    int i=0;
    if (int_from_pyobj(&i,obj,errmess)) {
        *v = (signed_char)i;
        return 1;
    }
    return 0;
}
"""
needs['short_from_pyobj'] = ['int_from_pyobj']
cfuncs['short_from_pyobj'] = """\
static int short_from_pyobj(short* v,PyObject *obj,const char *errmess) {
    int i=0;
    if (int_from_pyobj(&i,obj,errmess)) {
        *v = (short)i;
        return 1;
    }
    return 0;
}
"""
cfuncs['int_from_pyobj'] = """\
static int int_from_pyobj(int* v,PyObject *obj,const char *errmess) {
    PyObject* tmp = NULL;
    if (PyInt_Check(obj)) {
        *v = (int)PyInt_AS_LONG(obj);
        return 1;
    }
    tmp = PyNumber_Int(obj);
    if (tmp) {
        *v = PyInt_AS_LONG(tmp);
        Py_DECREF(tmp);
        return 1;
    }
    if (PyComplex_Check(obj))
        tmp = PyObject_GetAttrString(obj,\"real\");
    else if (PyString_Check(obj) || PyUnicode_Check(obj))
        /*pass*/;
    else if (PySequence_Check(obj))
        tmp = PySequence_GetItem(obj,0);
    if (tmp) {
        PyErr_Clear();
        if (int_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
        Py_DECREF(tmp);
    }
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL) err = #modulename#_error;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
cfuncs['long_from_pyobj'] = """\
static int long_from_pyobj(long* v,PyObject *obj,const char *errmess) {
    PyObject* tmp = NULL;
    if (PyInt_Check(obj)) {
        *v = PyInt_AS_LONG(obj);
        return 1;
    }
    tmp = PyNumber_Int(obj);
    if (tmp) {
        *v = PyInt_AS_LONG(tmp);
        Py_DECREF(tmp);
        return 1;
    }
    if (PyComplex_Check(obj))
        tmp = PyObject_GetAttrString(obj,\"real\");
    else if (PyString_Check(obj) || PyUnicode_Check(obj))
        /*pass*/;
    else if (PySequence_Check(obj))
        tmp = PySequence_GetItem(obj,0);
    if (tmp) {
        PyErr_Clear();
        if (long_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
        Py_DECREF(tmp);
    }
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL) err = #modulename#_error;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
needs['long_long_from_pyobj'] = ['long_long']
cfuncs['long_long_from_pyobj'] = """\
static int long_long_from_pyobj(long_long* v,PyObject *obj,const char *errmess) {
    PyObject* tmp = NULL;
    if (PyLong_Check(obj)) {
        *v = PyLong_AsLongLong(obj);
        return (!PyErr_Occurred());
    }
    if (PyInt_Check(obj)) {
        *v = (long_long)PyInt_AS_LONG(obj);
        return 1;
    }
    tmp = PyNumber_Long(obj);
    if (tmp) {
        *v = PyLong_AsLongLong(tmp);
        Py_DECREF(tmp);
        return (!PyErr_Occurred());
    }
    if (PyComplex_Check(obj))
        tmp = PyObject_GetAttrString(obj,\"real\");
    else if (PyString_Check(obj) || PyUnicode_Check(obj))
        /*pass*/;
    else if (PySequence_Check(obj))
        tmp = PySequence_GetItem(obj,0);
    if (tmp) {
        PyErr_Clear();
        if (long_long_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
        Py_DECREF(tmp);
    }
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL) err = #modulename#_error;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
needs['long_double_from_pyobj'] = ['double_from_pyobj', 'long_double']
cfuncs['long_double_from_pyobj'] = """\
static int long_double_from_pyobj(long_double* v,PyObject *obj,const char *errmess) {
    double d=0;
    if (PyArray_CheckScalar(obj)){
        if PyArray_IsScalar(obj, LongDouble) {
            PyArray_ScalarAsCtype(obj, v);
            return 1;
        }
        else if (PyArray_Check(obj) && PyArray_TYPE(obj)==NPY_LONGDOUBLE) {
            (*v) = *((npy_longdouble *)PyArray_DATA(obj));
            return 1;
        }
    }
    if (double_from_pyobj(&d,obj,errmess)) {
        *v = (long_double)d;
        return 1;
    }
    return 0;
}
"""
cfuncs['double_from_pyobj'] = """\
static int double_from_pyobj(double* v,PyObject *obj,const char *errmess) {
    PyObject* tmp = NULL;
    if (PyFloat_Check(obj)) {
#ifdef __sgi
        *v = PyFloat_AsDouble(obj);
#else
        *v = PyFloat_AS_DOUBLE(obj);
#endif
        return 1;
    }
    tmp = PyNumber_Float(obj);
    if (tmp) {
#ifdef __sgi
        *v = PyFloat_AsDouble(tmp);
#else
        *v = PyFloat_AS_DOUBLE(tmp);
#endif
        Py_DECREF(tmp);
        return 1;
    }
    if (PyComplex_Check(obj))
        tmp = PyObject_GetAttrString(obj,\"real\");
    else if (PyString_Check(obj) || PyUnicode_Check(obj))
        /*pass*/;
    else if (PySequence_Check(obj))
        tmp = PySequence_GetItem(obj,0);
    if (tmp) {
        PyErr_Clear();
        if (double_from_pyobj(v,tmp,errmess)) {Py_DECREF(tmp); return 1;}
        Py_DECREF(tmp);
    }
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL) err = #modulename#_error;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
needs['float_from_pyobj'] = ['double_from_pyobj']
cfuncs['float_from_pyobj'] = """\
static int float_from_pyobj(float* v,PyObject *obj,const char *errmess) {
    double d=0.0;
    if (double_from_pyobj(&d,obj,errmess)) {
        *v = (float)d;
        return 1;
    }
    return 0;
}
"""
needs['complex_long_double_from_pyobj'] = ['complex_long_double', 'long_double',
                                           'complex_double_from_pyobj']
cfuncs['complex_long_double_from_pyobj'] = """\
static int complex_long_double_from_pyobj(complex_long_double* v,PyObject *obj,const char *errmess) {
    complex_double cd={0.0,0.0};
    if (PyArray_CheckScalar(obj)){
        if PyArray_IsScalar(obj, CLongDouble) {
            PyArray_ScalarAsCtype(obj, v);
            return 1;
        }
        else if (PyArray_Check(obj) && PyArray_TYPE(obj)==NPY_CLONGDOUBLE) {
            (*v).r = ((npy_clongdouble *)PyArray_DATA(obj))->real;
            (*v).i = ((npy_clongdouble *)PyArray_DATA(obj))->imag;
            return 1;
        }
    }
    if (complex_double_from_pyobj(&cd,obj,errmess)) {
        (*v).r = (long_double)cd.r;
        (*v).i = (long_double)cd.i;
        return 1;
    }
    return 0;
}
"""
needs['complex_double_from_pyobj'] = ['complex_double']
cfuncs['complex_double_from_pyobj'] = """\
static int complex_double_from_pyobj(complex_double* v,PyObject *obj,const char *errmess) {
    Py_complex c;
    if (PyComplex_Check(obj)) {
        c=PyComplex_AsCComplex(obj);
        (*v).r=c.real, (*v).i=c.imag;
        return 1;
    }
    if (PyArray_IsScalar(obj, ComplexFloating)) {
        if (PyArray_IsScalar(obj, CFloat)) {
            npy_cfloat new;
            PyArray_ScalarAsCtype(obj, &new);
            (*v).r = (double)new.real;
            (*v).i = (double)new.imag;
        }
        else if (PyArray_IsScalar(obj, CLongDouble)) {
            npy_clongdouble new;
            PyArray_ScalarAsCtype(obj, &new);
            (*v).r = (double)new.real;
            (*v).i = (double)new.imag;
        }
        else { /* if (PyArray_IsScalar(obj, CDouble)) */
            PyArray_ScalarAsCtype(obj, v);
        }
        return 1;
    }
    if (PyArray_CheckScalar(obj)) { /* 0-dim array or still array scalar */
        PyObject *arr;
        if (PyArray_Check(obj)) {
            arr = PyArray_Cast((PyArrayObject *)obj, NPY_CDOUBLE);
        }
        else {
            arr = PyArray_FromScalar(obj, PyArray_DescrFromType(NPY_CDOUBLE));
        }
        if (arr==NULL) return 0;
        (*v).r = ((npy_cdouble *)PyArray_DATA(arr))->real;
        (*v).i = ((npy_cdouble *)PyArray_DATA(arr))->imag;
        return 1;
    }
    /* Python does not provide PyNumber_Complex function :-( */
    (*v).i=0.0;
    if (PyFloat_Check(obj)) {
#ifdef __sgi
        (*v).r = PyFloat_AsDouble(obj);
#else
        (*v).r = PyFloat_AS_DOUBLE(obj);
#endif
        return 1;
    }
    if (PyInt_Check(obj)) {
        (*v).r = (double)PyInt_AS_LONG(obj);
        return 1;
    }
    if (PyLong_Check(obj)) {
        (*v).r = PyLong_AsDouble(obj);
        return (!PyErr_Occurred());
    }
    if (PySequence_Check(obj) && !(PyString_Check(obj) || PyUnicode_Check(obj))) {
        PyObject *tmp = PySequence_GetItem(obj,0);
        if (tmp) {
            if (complex_double_from_pyobj(v,tmp,errmess)) {
                Py_DECREF(tmp);
                return 1;
            }
            Py_DECREF(tmp);
        }
    }
    {
        PyObject* err = PyErr_Occurred();
        if (err==NULL)
            err = PyExc_TypeError;
        PyErr_SetString(err,errmess);
    }
    return 0;
}
"""
needs['complex_float_from_pyobj'] = [
    'complex_float', 'complex_double_from_pyobj']
cfuncs['complex_float_from_pyobj'] = """\
static int complex_float_from_pyobj(complex_float* v,PyObject *obj,const char *errmess) {
    complex_double cd={0.0,0.0};
    if (complex_double_from_pyobj(&cd,obj,errmess)) {
        (*v).r = (float)cd.r;
        (*v).i = (float)cd.i;
        return 1;
    }
    return 0;
}
"""
needs['try_pyarr_from_char'] = ['pyobj_from_char1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_char'] = 'static int try_pyarr_from_char(PyObject* obj,char* v) {\n TRYPYARRAYTEMPLATE(char,\'c\');\n}\n'
needs['try_pyarr_from_unsigned_char'] = ['TRYPYARRAYTEMPLATE', 'unsigned_char']
cfuncs['try_pyarr_from_unsigned_char'] = 'static int try_pyarr_from_unsigned_char(PyObject* obj,unsigned_char* v) {\n TRYPYARRAYTEMPLATE(unsigned_char,\'b\');\n}\n'
needs['try_pyarr_from_signed_char'] = ['TRYPYARRAYTEMPLATE', 'signed_char']
cfuncs['try_pyarr_from_signed_char'] = 'static int try_pyarr_from_signed_char(PyObject* obj,signed_char* v) {\n TRYPYARRAYTEMPLATE(signed_char,\'1\');\n}\n'
needs['try_pyarr_from_short'] = ['pyobj_from_short1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_short'] = 'static int try_pyarr_from_short(PyObject* obj,short* v) {\n TRYPYARRAYTEMPLATE(short,\'s\');\n}\n'
needs['try_pyarr_from_int'] = ['pyobj_from_int1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_int'] = 'static int try_pyarr_from_int(PyObject* obj,int* v) {\n TRYPYARRAYTEMPLATE(int,\'i\');\n}\n'
needs['try_pyarr_from_long'] = ['pyobj_from_long1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_long'] = 'static int try_pyarr_from_long(PyObject* obj,long* v) {\n TRYPYARRAYTEMPLATE(long,\'l\');\n}\n'
needs['try_pyarr_from_long_long'] = [
    'pyobj_from_long_long1', 'TRYPYARRAYTEMPLATE', 'long_long']
cfuncs['try_pyarr_from_long_long'] = 'static int try_pyarr_from_long_long(PyObject* obj,long_long* v) {\n TRYPYARRAYTEMPLATE(long_long,\'L\');\n}\n'
needs['try_pyarr_from_float'] = ['pyobj_from_float1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_float'] = 'static int try_pyarr_from_float(PyObject* obj,float* v) {\n TRYPYARRAYTEMPLATE(float,\'f\');\n}\n'
needs['try_pyarr_from_double'] = ['pyobj_from_double1', 'TRYPYARRAYTEMPLATE']
cfuncs['try_pyarr_from_double'] = 'static int try_pyarr_from_double(PyObject* obj,double* v) {\n TRYPYARRAYTEMPLATE(double,\'d\');\n}\n'
needs['try_pyarr_from_complex_float'] = [
    'pyobj_from_complex_float1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_float']
cfuncs['try_pyarr_from_complex_float'] = 'static int try_pyarr_from_complex_float(PyObject* obj,complex_float* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(float,\'F\');\n}\n'
needs['try_pyarr_from_complex_double'] = [
    'pyobj_from_complex_double1', 'TRYCOMPLEXPYARRAYTEMPLATE', 'complex_double']
cfuncs['try_pyarr_from_complex_double'] = 'static int try_pyarr_from_complex_double(PyObject* obj,complex_double* v) {\n TRYCOMPLEXPYARRAYTEMPLATE(double,\'D\');\n}\n'
needs['create_cb_arglist'] = ['CFUNCSMESS', 'PRINTPYOBJERR', 'MINMAX']
cfuncs['create_cb_arglist'] = """\
static int create_cb_arglist(PyObject* fun,PyTupleObject* xa,const int maxnofargs,const int nofoptargs,int *nofargs,PyTupleObject **args,const char *errmess) {
    PyObject *tmp = NULL;
    PyObject *tmp_fun = NULL;
    int tot,opt,ext,siz,i,di=0;
    CFUNCSMESS(\"create_cb_arglist\\n\");
    tot=opt=ext=siz=0;
    /* Get the total number of arguments */
    if (PyFunction_Check(fun))
        tmp_fun = fun;
    else {
        di = 1;
        if (PyObject_HasAttrString(fun,\"im_func\")) {
            tmp_fun = PyObject_GetAttrString(fun,\"im_func\");
        }
        else if (PyObject_HasAttrString(fun,\"__call__\")) {
            tmp = PyObject_GetAttrString(fun,\"__call__\");
            if (PyObject_HasAttrString(tmp,\"im_func\"))
                tmp_fun = PyObject_GetAttrString(tmp,\"im_func\");
            else {
                tmp_fun = fun; /* built-in function */
                tot = maxnofargs;
                if (xa != NULL)
                    tot += PyTuple_Size((PyObject *)xa);
            }
            Py_XDECREF(tmp);
        }
        else if (PyFortran_Check(fun) || PyFortran_Check1(fun)) {
            tot = maxnofargs;
            if (xa != NULL)
                tot += PyTuple_Size((PyObject *)xa);
            tmp_fun = fun;
        }
        else if (F2PyCapsule_Check(fun)) {
            tot = maxnofargs;
            if (xa != NULL)
                ext = PyTuple_Size((PyObject *)xa);
            if(ext>0) {
                fprintf(stderr,\"extra arguments tuple cannot be used with CObject call-back\\n\");
                goto capi_fail;
            }
            tmp_fun = fun;
        }
    }
    if (tmp_fun==NULL) {
        fprintf(stderr,\"Call-back argument must be function|instance|instance.__call__|f2py-function but got %s.\\n\",(fun==NULL?\"NULL\":Py_TYPE(fun)->tp_name));
        goto capi_fail;
    }
#if PY_VERSION_HEX >= 0x03000000
    if (PyObject_HasAttrString(tmp_fun,\"__code__\")) {
        if (PyObject_HasAttrString(tmp = PyObject_GetAttrString(tmp_fun,\"__code__\"),\"co_argcount\"))
#else
    if (PyObject_HasAttrString(tmp_fun,\"func_code\")) {
        if (PyObject_HasAttrString(tmp = PyObject_GetAttrString(tmp_fun,\"func_code\"),\"co_argcount\"))
#endif
            tot = PyInt_AsLong(PyObject_GetAttrString(tmp,\"co_argcount\")) - di;
        Py_XDECREF(tmp);
    }
    /* Get the number of optional arguments */
#if PY_VERSION_HEX >= 0x03000000
    if (PyObject_HasAttrString(tmp_fun,\"__defaults__\")) {
        if (PyTuple_Check(tmp = PyObject_GetAttrString(tmp_fun,\"__defaults__\")))
#else
    if (PyObject_HasAttrString(tmp_fun,\"func_defaults\")) {
        if (PyTuple_Check(tmp = PyObject_GetAttrString(tmp_fun,\"func_defaults\")))
#endif
            opt = PyTuple_Size(tmp);
        Py_XDECREF(tmp);
    }
    /* Get the number of extra arguments */
    if (xa != NULL)
        ext = PyTuple_Size((PyObject *)xa);
    /* Calculate the size of call-backs argument list */
    siz = MIN(maxnofargs+ext,tot);
    *nofargs = MAX(0,siz-ext);
#ifdef DEBUGCFUNCS
    fprintf(stderr,\"debug-capi:create_cb_arglist:maxnofargs(-nofoptargs),tot,opt,ext,siz,nofargs=%d(-%d),%d,%d,%d,%d,%d\\n\",maxnofargs,nofoptargs,tot,opt,ext,siz,*nofargs);
#endif
    if (siz<tot-opt) {
        fprintf(stderr,\"create_cb_arglist: Failed to build argument list (siz) with enough arguments (tot-opt) required by user-supplied function (siz,tot,opt=%d,%d,%d).\\n\",siz,tot,opt);
        goto capi_fail;
    }
    /* Initialize argument list */
    *args = (PyTupleObject *)PyTuple_New(siz);
    for (i=0;i<*nofargs;i++) {
        Py_INCREF(Py_None);
        PyTuple_SET_ITEM((PyObject *)(*args),i,Py_None);
    }
    if (xa != NULL)
        for (i=(*nofargs);i<siz;i++) {
            tmp = PyTuple_GetItem((PyObject *)xa,i-(*nofargs));
            Py_INCREF(tmp);
            PyTuple_SET_ITEM(*args,i,tmp);
        }
    CFUNCSMESS(\"create_cb_arglist-end\\n\");
    return 1;
capi_fail:
    if ((PyErr_Occurred())==NULL)
        PyErr_SetString(#modulename#_error,errmess);
    return 0;
}
"""
def buildcfuncs():
    from .capi_maps import c2capi_map
    for k in c2capi_map.keys():
        m = 'pyarr_from_p_%s1' % k
        cppmacros[m] = '#define %s(v) (PyArray_SimpleNewFromData(0,NULL,%s,(char *)v))' % (m, c2capi_map[k])
    k = 'string'
    m = 'pyarr_from_p_%s1' % k
    # NPY_CHAR compatibility, NPY_STRING with itemsize 1
    cppmacros[m] = '#define %s(v,dims) (PyArray_New(&PyArray_Type, 1, dims, NPY_STRING, NULL, v, 1, NPY_ARRAY_CARRAY, NULL))' % (m)
############ Auxiliary functions for sorting needs ###################
def append_needs(need, flag=1):
    global outneeds, needs
    if isinstance(need, list):
        for n in need:
            append_needs(n, flag)
    elif isinstance(need, str):
        if not need:
            return
        if need in includes0:
            n = 'includes0'
        elif need in includes:
            n = 'includes'
        elif need in typedefs:
            n = 'typedefs'
        elif need in typedefs_generated:
            n = 'typedefs_generated'
        elif need in cppmacros:
            n = 'cppmacros'
        elif need in cfuncs:
            n = 'cfuncs'
        elif need in callbacks:
            n = 'callbacks'
        elif need in f90modhooks:
            n = 'f90modhooks'
        elif need in commonhooks:
            n = 'commonhooks'
        else:
            errmess('append_needs: unknown need %s\n' % (repr(need)))
            return
        if need in outneeds[n]:
            return
        if flag:
            tmp = {}
            if need in needs:
                for nn in needs[need]:
                    t = append_needs(nn, 0)
                    if isinstance(t, dict):
                        for nnn in t.keys():
                            if nnn in tmp:
                                tmp[nnn] = tmp[nnn] + t[nnn]
                            else:
                                tmp[nnn] = t[nnn]
            for nn in tmp.keys():
                for nnn in tmp[nn]:
                    if nnn not in outneeds[nn]:
                        outneeds[nn] = [nnn] + outneeds[nn]
            outneeds[n].append(need)
        else:
            tmp = {}
            if need in needs:
                for nn in needs[need]:
                    t = append_needs(nn, flag)
                    if isinstance(t, dict):
                        for nnn in t.keys():
                            if nnn in tmp:
                                tmp[nnn] = t[nnn] + tmp[nnn]
                            else:
                                tmp[nnn] = t[nnn]
            if n not in tmp:
                tmp[n] = []
            tmp[n].append(need)
            return tmp
    else:
        errmess('append_needs: expected list or string but got :%s\n' %
                (repr(need)))
def get_needs():
    global outneeds, needs
    res = {}
    for n in outneeds.keys():
        out = []
        saveout = copy.copy(outneeds[n])
        while len(outneeds[n]) > 0:
            if outneeds[n][0] not in needs:
                out.append(outneeds[n][0])
                del outneeds[n][0]
            else:
                flag = 0
                for k in outneeds[n][1:]:
                    if k in needs[outneeds[n][0]]:
                        flag = 1
                        break
                if flag:
                    outneeds[n] = outneeds[n][1:] + [outneeds[n][0]]
                else:
                    out.append(outneeds[n][0])
                    del outneeds[n][0]
            if saveout and (0 not in map(lambda x, y: x == y, saveout, outneeds[n])) \
                    and outneeds[n] != []:
                print(n, saveout)
                errmess(
                    'get_needs: no progress in sorting needs, probably circular dependence, skipping.\n')
                out = out + saveout
                break
            saveout = copy.copy(outneeds[n])
        if out == []:
            out = [n]
        res[n] = out
    return res
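
# A minimal sketch of how the helpers above compose (illustrative only):
#     append_needs('string_from_pyobj')  # also pulls in 'string', 'STRINGMALLOC',
#                                        # 'STRINGCOPYN' and their own needs
#     get_needs()                        # returns each category dependency-sorted,
#                                        # ready for code generation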
|
agiliq/django | refs/heads/master | django/conf/locale/da/formats.py | 315 |
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y',                 # '25.10.2006'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y %H:%M:%S',        # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',     # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',           # '25.10.2006 14:30'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
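
# A quick sanity check of the input formats above (illustrative only):
#     >>> from datetime import datetime
#     >>> datetime.strptime('25.10.2006', DATE_INPUT_FORMATS[0]).date()
#     datetime.date(2006, 10, 25)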
|
dednal/chromium.src | refs/heads/nw12 | build/android/gyp/java_cpp_enum.py | 9 |
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import re
import optparse
import os
from string import Template
import sys
from util import build_utils
# List of C++ types that are compatible with the Java code generated by this
# script.
ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
                             'short', 'unsigned short',
                             'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
class EnumDefinition(object):
  def __init__(self, original_enum_name=None, class_name_override=None,
               enum_package=None, entries=None, fixed_type=None):
    self.original_enum_name = original_enum_name
    self.class_name_override = class_name_override
    self.enum_package = enum_package
    self.entries = collections.OrderedDict(entries or [])
    self.prefix_to_strip = None
    self.fixed_type = fixed_type

  def AppendEntry(self, key, value):
    if key in self.entries:
      raise Exception('Multiple definitions of key %s found.' % key)
    self.entries[key] = value

  @property
  def class_name(self):
    return self.class_name_override or self.original_enum_name

  def Finalize(self):
    self._Validate()
    self._AssignEntryIndices()
    self._StripPrefix()

  def _Validate(self):
    assert self.class_name
    assert self.enum_package
    assert self.entries
    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
      raise Exception('Fixed type %s for enum %s not whitelisted.' %
                      (self.fixed_type, self.class_name))

  def _AssignEntryIndices(self):
    # Enums, if given no value, are given the value of the previous enum + 1.
    if not all(self.entries.values()):
      prev_enum_value = -1
      for key, value in self.entries.iteritems():
        if not value:
          self.entries[key] = prev_enum_value + 1
        elif value in self.entries:
          self.entries[key] = self.entries[value]
        else:
          try:
            self.entries[key] = int(value)
          except ValueError:
            raise Exception('Could not interpret integer from enum value "%s" '
                            'for key %s.' % (value, key))
        prev_enum_value = self.entries[key]

  def _StripPrefix(self):
    prefix_to_strip = self.prefix_to_strip
    if not prefix_to_strip:
      prefix_to_strip = self.original_enum_name
      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper()
      prefix_to_strip += '_'
      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
        prefix_to_strip = ''
    entries = collections.OrderedDict()
    for (k, v) in self.entries.iteritems():
      stripped_key = k.replace(prefix_to_strip, '', 1)
      if isinstance(v, basestring):
        stripped_value = v.replace(prefix_to_strip, '', 1)
      else:
        stripped_value = v
      entries[stripped_key] = stripped_value
    self.entries = entries
class DirectiveSet(object):
  class_name_override_key = 'CLASS_NAME_OVERRIDE'
  enum_package_key = 'ENUM_PACKAGE'
  prefix_to_strip_key = 'PREFIX_TO_STRIP'

  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]

  def __init__(self):
    self._directives = {}

  def Update(self, key, value):
    if key not in DirectiveSet.known_keys:
      raise Exception("Unknown directive: " + key)
    self._directives[key] = value

  @property
  def empty(self):
    return len(self._directives) == 0

  def UpdateDefinition(self, definition):
    definition.class_name_override = self._directives.get(
        DirectiveSet.class_name_override_key, '')
    definition.enum_package = self._directives.get(
        DirectiveSet.enum_package_key)
    definition.prefix_to_strip = self._directives.get(
        DirectiveSet.prefix_to_strip_key)
class HeaderParser(object):
  single_line_comment_re = re.compile(r'\s*//')
  multi_line_comment_start_re = re.compile(r'\s*/\*')
  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
  generator_directive_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
  multi_line_generator_directive_start_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]*)\\$')
  multi_line_directive_continuation_re = re.compile(
      r'^\s*//\s+([\.\w]+)\\$')
  multi_line_directive_end_re = re.compile(
      r'^\s*//\s+([\.\w]*)$')

  optional_class_or_struct_re = r'(class|struct)?'
  enum_name_re = r'(\w+)'
  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
  enum_start_re = re.compile(r'^\s*enum\s+' + optional_class_or_struct_re +
                             '\s*' + enum_name_re + '\s*' +
                             optional_fixed_type_re + '\s*{\s*$')

  def __init__(self, lines, path=None):
    self._lines = lines
    self._path = path
    self._enum_definitions = []
    self._in_enum = False
    self._current_definition = None
    self._generator_directives = DirectiveSet()
    self._multi_line_generator_directive = None

  def _ApplyGeneratorDirectives(self):
    self._generator_directives.UpdateDefinition(self._current_definition)
    self._generator_directives = DirectiveSet()

  def ParseDefinitions(self):
    for line in self._lines:
      self._ParseLine(line)
    return self._enum_definitions

  def _ParseLine(self, line):
    if self._multi_line_generator_directive:
      self._ParseMultiLineDirectiveLine(line)
    elif not self._in_enum:
      self._ParseRegularLine(line)
    else:
      self._ParseEnumLine(line)

  def _ParseEnumLine(self, line):
    if HeaderParser.single_line_comment_re.match(line):
      return
    if HeaderParser.multi_line_comment_start_re.match(line):
      raise Exception('Multi-line comments in enums are not supported in ' +
                      self._path)
    enum_end = HeaderParser.enum_end_re.match(line)
    enum_entry = HeaderParser.enum_line_re.match(line)
    if enum_end:
      self._ApplyGeneratorDirectives()
      self._current_definition.Finalize()
      self._enum_definitions.append(self._current_definition)
      self._in_enum = False
    elif enum_entry:
      enum_key = enum_entry.groups()[0]
      enum_value = enum_entry.groups()[2]
      self._current_definition.AppendEntry(enum_key, enum_value)

  def _ParseMultiLineDirectiveLine(self, line):
    multi_line_directive_continuation = (
        HeaderParser.multi_line_directive_continuation_re.match(line))
    multi_line_directive_end = (
        HeaderParser.multi_line_directive_end_re.match(line))

    if multi_line_directive_continuation:
      value_cont = multi_line_directive_continuation.groups()[0]
      self._multi_line_generator_directive[1].append(value_cont)
    elif multi_line_directive_end:
      directive_name = self._multi_line_generator_directive[0]
      directive_value = "".join(self._multi_line_generator_directive[1])
      directive_value += multi_line_directive_end.groups()[0]
      self._multi_line_generator_directive = None
      self._generator_directives.Update(directive_name, directive_value)
    else:
      raise Exception('Malformed multi-line directive declaration in ' +
                      self._path)

  def _ParseRegularLine(self, line):
    enum_start = HeaderParser.enum_start_re.match(line)
    generator_directive = HeaderParser.generator_directive_re.match(line)
    multi_line_generator_directive_start = (
        HeaderParser.multi_line_generator_directive_start_re.match(line))

    if generator_directive:
      directive_name = generator_directive.groups()[0]
      directive_value = generator_directive.groups()[1]
      self._generator_directives.Update(directive_name, directive_value)
    elif multi_line_generator_directive_start:
      directive_name = multi_line_generator_directive_start.groups()[0]
      directive_value = multi_line_generator_directive_start.groups()[1]
      self._multi_line_generator_directive = (directive_name, [directive_value])
    elif enum_start:
      if self._generator_directives.empty:
        return
      self._current_definition = EnumDefinition(
          original_enum_name=enum_start.groups()[1],
          fixed_type=enum_start.groups()[3])
      self._in_enum = True
def GetScriptName():
  script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
  build_index = script_components.index('build')
  return os.sep.join(script_components[build_index:])


def DoGenerate(options, source_paths):
  output_paths = []
  for source_path in source_paths:
    enum_definitions = DoParseHeaderFile(source_path)
    for enum_definition in enum_definitions:
      package_path = enum_definition.enum_package.replace('.', os.path.sep)
      file_name = enum_definition.class_name + '.java'
      output_path = os.path.join(options.output_dir, package_path, file_name)
      output_paths.append(output_path)
      if not options.print_output_only:
        build_utils.MakeDirectory(os.path.dirname(output_path))
        DoWriteOutput(source_path, output_path, enum_definition)
  return output_paths


def DoParseHeaderFile(path):
  with open(path) as f:
    return HeaderParser(f.readlines(), path).ParseDefinitions()


def GenerateOutput(source_path, enum_definition):
  template = Template("""
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by
//     ${SCRIPT_NAME}
// From
//     ${SOURCE_PATH}
package ${PACKAGE};
public class ${CLASS_NAME} {
${ENUM_ENTRIES}
}
""")
  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
  enum_entries_string = []
  for enum_name, enum_value in enum_definition.entries.iteritems():
    values = {
        'NAME': enum_name,
        'VALUE': enum_value,
    }
    enum_entries_string.append(enum_template.substitute(values))
  enum_entries_string = '\n'.join(enum_entries_string)

  values = {
      'CLASS_NAME': enum_definition.class_name,
      'ENUM_ENTRIES': enum_entries_string,
      'PACKAGE': enum_definition.enum_package,
      'SCRIPT_NAME': GetScriptName(),
      'SOURCE_PATH': source_path,
  }
  return template.substitute(values)


def DoWriteOutput(source_path, output_path, enum_definition):
  with open(output_path, 'w') as out_file:
    out_file.write(GenerateOutput(source_path, enum_definition))


def AssertFilesList(output_paths, assert_files_list):
  actual = set(output_paths)
  expected = set(assert_files_list)
  if not actual == expected:
    need_to_add = list(actual - expected)
    need_to_remove = list(expected - actual)
    raise Exception('Output files list does not match expectations. Please '
                    'add %s and remove %s.' % (need_to_add, need_to_remove))


def DoMain(argv):
  parser = optparse.OptionParser()
  parser.add_option('--assert_file', action="append", default=[],
                    dest="assert_files_list", help='Assert that the given '
                    'file is an output. There can be multiple occurrences of '
                    'this flag.')
  parser.add_option('--output_dir', help='Base path for generated files.')
  parser.add_option('--print_output_only', help='Only print output paths.',
                    action='store_true')
  options, args = parser.parse_args(argv)
  output_paths = DoGenerate(options, args)
  if options.assert_files_list:
    AssertFilesList(output_paths, options.assert_files_list)
  return " ".join(output_paths)


if __name__ == '__main__':
  DoMain(sys.argv[1:])
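
# A minimal invocation sketch (paths here are hypothetical):
#     DoMain(['--output_dir', 'out', 'base/android/my_enums.h'])
# writes out/<enum package as a path>/<ClassName>.java for every enum that is
# annotated with a // GENERATED_JAVA_ENUM_PACKAGE directive.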
|
msramalho/pyhparser | refs/heads/master | pyhparser/__init__.py | 1 |
import sys
sys.path.append('../')
from pyhparser.pyhparser import *
from pyhparser.grammar import *
from pyhparser.utils import *
from pyhparser.classParser import *
|
lilclmrboy/workoutprogram | refs/heads/master | generate_climbing_workout.py | 1 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Python rewrite of:
# https://code.google.com/p/weightliftingworkout/source/browse/#svn%2Ftrunk%253Fstate%253Dclosed
import sys
import workout
from workout import ExerciseDetail
from workout import Exercise
from workout import Workout
from workout import WorkoutProgram
from datetime import datetime, timedelta
from pytz import timezone
import pytz
# where:
# 0 - Sunday
# 1 - Monday
# 2 - Tuesday
# 3 - Wednesday
# 4 - Thursday
# 5 - Friday
# 6 - Saturday
print ("Generating climbing workout program")
startdate = datetime(2016,2,29,tzinfo=pytz.utc)
volume = 550.0
workout_day_inc = [1, 2, 2, 2]
wp = WorkoutProgram("Climbing Training", startdate, volume, 12, "Matt")
dayIndex = 0
workoutCount = 0
totalWorkouts = 36
# ------------------ Workout Generation --------------------#
for week in range(0, wp.workoutprogram_nWeeks):
    dayIndex = dayIndex + workout_day_inc[0]
    wPercent = workout.periodization_equation(workoutCount, totalWorkouts)
    workoutCount = workoutCount + 1
    workoutdate = wp.workoutprogram_dt_start + timedelta(days=dayIndex)
    wkout = Workout("%s - Strength" % (workoutdate.strftime("%A")), workoutdate, wPercent, volume)
    wkout.add_exercise_target_volume("Climbing", 8)
    rndex = wkout.pick_random_exercise("Core")
    wkout.add_exercise(rndex['name'])
    wp.add_workout(wkout)

    wPercent = workout.periodization_equation(workoutCount, totalWorkouts)
    workoutCount = workoutCount + 1
    dayIndex = dayIndex + workout_day_inc[1]
    workoutdate = wp.workoutprogram_dt_start + timedelta(days=dayIndex)
    wkout = Workout("%s - Strength" % (workoutdate.strftime("%A")), workoutdate, wPercent, volume)
    wkout.add_exercise_target_volume("Climbing", 8)
    rndex = wkout.pick_random_exercise("Core")
    wkout.add_exercise(rndex['name'])
    wp.add_workout(wkout)

    wPercent = workout.periodization_equation(workoutCount, totalWorkouts)
    workoutCount = workoutCount + 1
    dayIndex = dayIndex + workout_day_inc[2]
    workoutdate = wp.workoutprogram_dt_start + timedelta(days=dayIndex)
    wkout = Workout("%s - Olympic" % (workoutdate.strftime("%A")), workoutdate, wPercent, volume)
    wkout.add_exercise_target_volume("Climbing", 8)
    rndex = wkout.pick_random_exercise("Core")
    wkout.add_exercise(rndex['name'])
    wp.add_workout(wkout)

    dayIndex = dayIndex + workout_day_inc[3]
wp.create_txt_workout("climbing")
# wp.create_icalendar_workout()
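
# Shape of the program: each week schedules three climbing+core workouts,
# spaced by workout_day_inc days, with intensity (wPercent) swept over the
# 36 planned workouts by workout.periodization_equation.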
|
morepath/morepath-cookiecutter | refs/heads/master | {{cookiecutter.package_name}}/{{cookiecutter.package_name}}/view.py | 1 |
from .app import App
from . import model
{%- if 'REST' in cookiecutter.goal %}
@App.json(model=model.Root)
def view_root(self, request):
    return {
        'greetings': [
            {
                'name': greeting.person,
                '@id': request.link(greeting)
            }
            for greeting in self.greetings
        ]
    }


@App.json(model=model.Greeting)
def view_greeting(self, request):
    return {
        'greeting': 'hello ' + self.person
    }
{%- else %}
@App.html(model=model.Root, template='index.pt')
def view_root(self, request):
    return {
        'greetings': self.greetings,
        'request': request,
    }


@App.html(model=model.Greeting, template='greeting.pt')
def view_greeting(self, request):
    return {
        'greeting': 'hello ' + self.person
    }
{%- endif %}
|
uezo/minette-python | refs/heads/master | minette/scheduler/base.py | 1 |
""" Scheduler for periodic tasks """
import traceback
from logging import getLogger
import schedule
import time
from concurrent.futures import ThreadPoolExecutor
class Task:
"""
Base class of tasks
Attributes
----------
config : minette.Config
Configuration
timezone : pytz.timezone
Timezone
logger : logging.Logger
Logger
connection_provider : minette.ConnectionProvider
Connection provider to use database in each tasks
"""
def __init__(self, config=None, timezone=None, logger=None,
connection_provider=None, **kwargs):
"""
Parameters
----------
config : minette.Config, default None
Configuration
timezone : pytz.timezone, default None
Timezone
logger : logging.Logger, default None
Logger
connection_provider : minette.ConnectionProvider
Connection provider to use database in each tasks
"""
self.config = config
self.timezone = timezone
self.logger = logger or getLogger(__name__)
self.connection_provider = connection_provider
def do(self, **kwargs):
"""
Implement your periodic task
"""
self.logger.error("Task is not implemented")
class Scheduler:
"""
Task scheduler for periodic tasks
Examples
--------
To start doing scheduled tasks, just create `Scheduler` instance
and register task(s), then call `start()`
>>> my_scheduler = MyScheduler()
>>> my_scheduler.every_minutes(MyTask)
>>> my_scheduler.start()
To register tasks, this class provides shortcut methods.
Each tasks run at worker threads.
>>> my_scheduler.every_minutes(MyTask)
>>> my_scheduler.every_seconds(MyTask, seconds=5)
>>> my_scheduler.every_seconds(MyTask, seconds=5, arg1="val1", arg2="val2")
You can also use internal `schedule` to register tasks
then the tasks run at main thread.
>>> my_scheduler.schedule.every().minute.do(self.create_task(MyTask))
>>> my_scheduler.schedule.every().hour.do(self.create_task(YourTask))
Notes
-----
How to execute jobs in parallel?
https://schedule.readthedocs.io/en/stable/faq.html#how-to-execute-jobs-in-parallel
Attributes
----------
config : minette.Config
Configuration
timezone : pytz.timezone
Timezone
logger : logging.Logger
Logger
threads : int
Number of worker threads to process tasks
connection_provider : minette.ConnectionProvider
        Connection provider to use database in each task
schedule : schedule
schedule module
executor : concurrent.futures.ThreadPoolExecutor
Executor to run tasks at worker threads
"""
def __init__(self, config=None, timezone=None, logger=None, threads=None,
connection_provider=None, **kwargs):
"""
Parameters
----------
config : minette.Config, default None
Configuration
timezone : pytz.timezone, default None
Timezone
logger : logging.Logger, default None
Logger
threads : int, default None
Number of worker threads to process tasks
connection_provider : ConnectionProvider, default None
            Connection provider to use database in each task
"""
self.config = config
self.timezone = timezone
self.logger = logger or getLogger(__name__)
self.threads = threads
self.connection_provider = connection_provider
self.schedule = schedule
self.executor = ThreadPoolExecutor(
max_workers=self.threads, thread_name_prefix="SchedulerThread")
self._is_running = False
@property
def is_running(self):
return self._is_running
def create_task(self, task_class, **kwargs):
"""
        Create and return a callable function for the task
Parameters
----------
task_class : type
Class of task
Returns
-------
task_method : callable
Callable interface of task
"""
if isinstance(task_class, type):
if issubclass(task_class, Task):
return task_class(
config=self.config,
timezone=self.timezone,
logger=self.logger,
connection_provider=self.connection_provider,
**kwargs).do
else:
raise TypeError(
"task_class should be a subclass of minette.Task " +
"or callable, not {}".format(task_class.__name__))
elif callable(task_class):
return task_class
else:
            raise TypeError(
                "task_class should be a subclass of minette.Task " +
                "or callable, not an instance of {}".format(
                    task_class.__class__.__name__))
def every_seconds(self, task, seconds=1, *args, **kwargs):
self.schedule.every(seconds).seconds.do(
self.executor.submit, self.create_task(task), *args, **kwargs)
def every_minutes(self, task, minutes=1, *args, **kwargs):
self.schedule.every(minutes).minutes.do(
self.executor.submit, self.create_task(task), *args, **kwargs)
def every_hours(self, task, hours=1, *args, **kwargs):
self.schedule.every(hours).hours.do(
self.executor.submit, self.create_task(task), *args, **kwargs)
def every_days(self, task, days=1, *args, **kwargs):
self.schedule.every(days).days.do(
self.executor.submit, self.create_task(task), *args, **kwargs)
def start(self):
"""
Start scheduler
"""
self.logger.info("Task scheduler started")
self._is_running = True
while self._is_running:
self.schedule.run_pending()
time.sleep(1)
self.logger.info("Task scheduler stopped")
def stop(self):
"""
Stop scheduler
"""
self._is_running = False
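if __name__ == "__main__":
    # Usage sketch (illustrative; `EchoTask` is hypothetical and not part of
    # minette): subclass Task, override do(), then register the class with one
    # of the Scheduler shortcut methods and start the blocking loop.
    class EchoTask(Task):
        def do(self, **kwargs):
            self.logger.info("EchoTask fired")

    my_scheduler = Scheduler(threads=2)
    my_scheduler.every_seconds(EchoTask, seconds=5)
    my_scheduler.start()  # blocking loop; Scheduler.stop() ends it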
|
huxianglin/pythonstudy
|
refs/heads/master
|
week05-胡湘林/ATM/shop/shop_conf/settings.py
|
1
|
#!/usr/bin/env python
# encoding:utf-8
# __author__: huxianglin
# date: 2016-09-18
# blog: http://huxianglin.cnblogs.com/ http://xianglinhu.blog.51cto.com/
import os
import logging
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
USER_DATABASE = {
    "name": "users",
    "path": os.path.join(BASE_DIR, "shop_data", "users_info")
}
DATABASE_ENGINE = "file_storage"  # support mysql in the future
GOODS_DATABASE = {
    "name": "goods",
    "path": os.path.join(BASE_DIR, "shop_data", "goods_data.json")
}
LOG_INFO = {
    "shop_path": os.path.join(BASE_DIR, "shop_logs"),
    "LOG_LEVEL": logging.INFO
}
BANK_CARD = "666666"
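# Usage sketch (illustrative; the consumer code below is hypothetical and not
# part of this settings module): how LOG_INFO could be applied with the
# standard logging module.
#
#     if not os.path.isdir(LOG_INFO["shop_path"]):
#         os.makedirs(LOG_INFO["shop_path"])
#     logging.basicConfig(
#         filename=os.path.join(LOG_INFO["shop_path"], "shop.log"),
#         level=LOG_INFO["LOG_LEVEL"])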
|
Microsoft/PTVS
|
refs/heads/master
|
Python/Product/Pyvot/Pyvot/doc/src/conf.py
|
7
|
# -*- coding: utf-8 -*-
#
# Pyvot documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 29 14:15:10 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path = [os.path.abspath(os.path.join('..', '..'))] + sys.path
import xl
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage']
autodoc_default_flags = [ 'members', 'undoc-members' ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Pyvot'
copyright = u'2011, Microsoft Corporation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = xl.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [ ]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Pyvotdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Pyvot.tex', u'Pyvot Documentation',
u'Microsoft Corporation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
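# Build sketch (illustrative, not part of the autogenerated configuration):
# with this conf.py in the documentation source root, HTML output can be
# produced with
#
#     sphinx-build -b html . _build/html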
|
velikodniy/aiotelegram
|
refs/heads/master
|
test/test_types.py
|
1
|
import unittest
from aiotelegram.types import *
class UserTest(unittest.TestCase):
def test_fromJson_all_fields(self):
user = User.fromJson('{"id": 1, "first_name": "John", "last_name": "Doe", "username": "@johndoe"}')
self.assertEqual(user.id, 1)
self.assertEqual(user.first_name, "John")
self.assertEqual(user.last_name, "Doe")
self.assertEqual(user.username, "@johndoe")
def test_fromJson_without_optional(self):
user = User.fromJson('{"id": 1, "first_name": "John"}')
self.assertEqual(user.id, 1)
self.assertEqual(user.first_name, "John")
self.assertIsNone(user.last_name)
self.assertIsNone(user.username)
def test_init_all_fields(self):
user = User(1, "John", "Doe", "@johndoe")
self.assertEqual(user.id, 1)
self.assertEqual(user.first_name, "John")
self.assertEqual(user.last_name, "Doe")
self.assertEqual(user.username, "@johndoe")
def test_fromJson_forget_required(self):
self.assertRaises(TypeError, User.fromJson,
'{"first_name": "John", "last_name": "Doe", "username": "@johndoe"}')
class ChatTest(unittest.TestCase):
def test_fromJson_all_fields(self):
chat = Chat.fromJson(
'''{"id": 1, "type": "group", "title": "Test group",
"username": "@johndoe", "first_name": "John", "last_name": "Doe"}'''
)
self.assertEqual(chat.id, 1)
self.assertEqual(chat.type, "group")
self.assertEqual(chat.title, "Test group")
self.assertEqual(chat.username, "@johndoe")
self.assertEqual(chat.first_name, "John")
self.assertEqual(chat.last_name, "Doe")
def test_fromJson_without_optional(self):
chat = Chat.fromJson('{"id": 1, "type": "group"}')
self.assertEqual(chat.id, 1)
self.assertEqual(chat.type, "group")
self.assertIsNone(chat.title)
self.assertIsNone(chat.username)
self.assertIsNone(chat.first_name)
self.assertIsNone(chat.last_name)
def test_init_all_fields(self):
chat = Chat(id=1, type="group", title="Test group",
username="@johndoe", first_name="John", last_name="Doe")
self.assertEqual(chat.id, 1)
self.assertEqual(chat.type, "group")
self.assertEqual(chat.title, "Test group")
self.assertEqual(chat.username, "@johndoe")
self.assertEqual(chat.first_name, "John")
self.assertEqual(chat.last_name, "Doe")
class PhotoSizeTest(unittest.TestCase):
def test_fromJson_all_fields(self):
photo_size = PhotoSize.fromJson(
'''{"file_id": "id123", "width": 800, "height": 600,
"file_size": 123456}'''
)
self.assertEqual(photo_size.file_id, "id123")
self.assertEqual(photo_size.width, 800)
self.assertEqual(photo_size.height, 600)
self.assertEqual(photo_size.file_size, 123456)
def test_fromJson_without_optional(self):
photo_size = PhotoSize.fromJson('{"file_id": "id123", "width": 800, "height": 600}')
self.assertEqual(photo_size.file_id, "id123")
self.assertEqual(photo_size.width, 800)
self.assertEqual(photo_size.height, 600)
self.assertIsNone(photo_size.file_size)
def test_init_all_fields(self):
photo_size = PhotoSize(file_id="id123", width=800, height=600, file_size=123456)
self.assertEqual(photo_size.file_id, "id123")
self.assertEqual(photo_size.width, 800)
self.assertEqual(photo_size.height, 600)
self.assertEqual(photo_size.file_size, 123456)
class AudioTest(unittest.TestCase):
def test_fromJson_all_fields(self):
audio = Audio.fromJson(
'''{"file_id": "id12345", "duration": 253, "performer": "Amy Nuttall",
"title": "Greensleeves", "mime_type": "audio/ogg", "file_size": 3940000}'''
)
self.assertEqual(audio.file_id, "id12345")
self.assertEqual(audio.duration, 253)
self.assertEqual(audio.performer, "Amy Nuttall")
self.assertEqual(audio.title, "Greensleeves")
self.assertEqual(audio.mime_type, "audio/ogg")
self.assertEqual(audio.file_size, 3940000)
def test_fromJson_without_optional(self):
audio = Audio.fromJson('{"file_id": "id12345", "duration": 253}')
self.assertEqual(audio.file_id, "id12345")
self.assertEqual(audio.duration, 253)
self.assertIsNone(audio.performer)
self.assertIsNone(audio.title)
self.assertIsNone(audio.mime_type)
self.assertIsNone(audio.file_size)
def test_init_all_fields(self):
audio = Audio(file_id="id12345", duration=253, performer="Amy Nuttall",
title="Greensleeves", mime_type="audio/ogg", file_size=3940000)
self.assertEqual(audio.file_id, "id12345")
self.assertEqual(audio.duration, 253)
self.assertEqual(audio.performer, "Amy Nuttall")
self.assertEqual(audio.title, "Greensleeves")
self.assertEqual(audio.mime_type, "audio/ogg")
self.assertEqual(audio.file_size, 3940000)
class DocumentTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.photo_size = PhotoSize(file_id="id123", width=800, height=600, file_size=123456)
def test_fromJson_all_fields(self):
document = Document.fromJson('''{"file_id": "id12345",
"thumb": {
"file_id": "id123", "width": 800, "height": 600,
"file_size": 123456
}, "file_name": "document.pdf", "mime_type": "application/pdf", "file_size": 654321}''')
self.assertEqual(document.file_id, 'id12345')
self.assertEqual(document.thumb, self.photo_size)
self.assertEqual(document.file_name, 'document.pdf')
self.assertEqual(document.mime_type, 'application/pdf')
self.assertEqual(document.file_size, 654321)
def test_fromJson_without_optional(self):
document = Document.fromJson('{"file_id": "id12345"}')
self.assertEqual(document.file_id, 'id12345')
self.assertIsNone(document.thumb)
self.assertIsNone(document.file_name)
self.assertIsNone(document.mime_type)
self.assertIsNone(document.file_size)
def test_init_all_fields(self):
document = Document(file_id='id12345',
thumb=self.photo_size, file_name='document.pdf',
mime_type='application/pdf', file_size=654321)
self.assertEqual(document.file_id, 'id12345')
self.assertEqual(document.thumb, self.photo_size)
self.assertEqual(document.file_name, 'document.pdf')
self.assertEqual(document.mime_type, 'application/pdf')
self.assertEqual(document.file_size, 654321)
class StickerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.photo_size = PhotoSize(file_id="id123", width=800, height=600, file_size=123456)
def test_fromJson_all_fields(self):
sticker = Sticker.fromJson('''{"file_id": "id12345",
"width": 800, "height": 600,
"thumb": %s, "file_size": 654321}''' % self.photo_size.toJson())
self.assertEqual(sticker.file_id, 'id12345')
self.assertEqual(sticker.width, 800)
self.assertEqual(sticker.height, 600)
self.assertEqual(sticker.thumb, self.photo_size)
self.assertEqual(sticker.file_size, 654321)
def test_fromJson_without_optional(self):
sticker = Sticker.fromJson('''{"file_id": "id12345",
"width": 800, "height": 600}''')
self.assertEqual(sticker.file_id, 'id12345')
self.assertEqual(sticker.width, 800)
self.assertEqual(sticker.height, 600)
self.assertIsNone(sticker.thumb)
self.assertIsNone(sticker.file_size)
class VideoTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.photo_size = PhotoSize(file_id="id123", width=800, height=600, file_size=123456)
def test_fromJson_all_fields(self):
video = Video.fromJson('''{"file_id": "id12345",
"width": 800, "height": 600, "duration": 120,
"thumb": %s, "mime_type": "video/mp4", "file_size": 654321}'''
% self.photo_size.toJson())
self.assertEqual(video.file_id, 'id12345')
self.assertEqual(video.width, 800)
self.assertEqual(video.height, 600)
self.assertEqual(video.duration, 120)
self.assertEqual(video.thumb, self.photo_size)
self.assertEqual(video.mime_type, "video/mp4")
self.assertEqual(video.file_size, 654321)
def test_fromJson_without_optional(self):
video = Video.fromJson('''{"file_id": "id12345",
"width": 800, "height": 600, "duration": 120}''')
self.assertEqual(video.file_id, 'id12345')
self.assertEqual(video.width, 800)
self.assertEqual(video.height, 600)
self.assertEqual(video.duration, 120)
self.assertIsNone(video.thumb)
self.assertIsNone(video.mime_type)
self.assertIsNone(video.file_size)
class VoiceTest(unittest.TestCase):
def test_fromJson_all_fields(self):
voice = Voice.fromJson(
'''{"file_id": "id12345", "duration": 253,
"mime_type": "audio/ogg", "file_size": 3940000}'''
)
self.assertEqual(voice.file_id, "id12345")
self.assertEqual(voice.duration, 253)
self.assertEqual(voice.mime_type, "audio/ogg")
self.assertEqual(voice.file_size, 3940000)
    def test_fromJson_without_optional(self):
        voice = Voice.fromJson('{"file_id": "id12345", "duration": 253}')
        self.assertEqual(voice.file_id, "id12345")
        self.assertEqual(voice.duration, 253)
        self.assertIsNone(voice.mime_type)
        self.assertIsNone(voice.file_size)
class ContactTest(unittest.TestCase):
def test_fromJson_all_fields(self):
contact = Contact.fromJson(
'''{"phone_number": "+12345678", "first_name": "John",
"last_name": "Doe", "user_id": 1234}'''
)
self.assertEqual(contact.phone_number, '+12345678')
self.assertEqual(contact.first_name, 'John')
self.assertEqual(contact.last_name, 'Doe')
self.assertEqual(contact.user_id, 1234)
def test_fromJson_without_optional(self):
contact = Contact.fromJson(
'{"phone_number": "+12345678", "first_name": "John"}'
)
self.assertEqual(contact.phone_number, '+12345678')
self.assertEqual(contact.first_name, 'John')
self.assertIsNone(contact.last_name)
self.assertIsNone(contact.user_id)
class LocationTest(unittest.TestCase):
def test_fromJson_all_fields(self):
location = Location.fromJson('{"longitude": 29.471111, "latitude": 46.830556}')
self.assertEqual(location.longitude, 29.471111)
self.assertEqual(location.latitude, 46.830556)
def test_fromJson_int_fields(self):
location = Location.fromJson('{"longitude": 29, "latitude": 46}')
self.assertEqual(location.longitude, 29)
self.assertEqual(location.latitude, 46)
class UserProfilePhotosTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.ps1 = PhotoSize(file_id="id1", width=100, height=100, file_size=1000)
cls.ps2 = PhotoSize(file_id="id2", width=200, height=200, file_size=2000)
cls.ps3 = PhotoSize(file_id="id3", width=300, height=300, file_size=3000)
cls.ps4 = PhotoSize(file_id="id4", width=400, height=400, file_size=4000)
def test_fromJson_all_fields(self):
upp = UserProfilePhotos.fromJson('''
{
"total_count": 2,
"photos": [[%s, %s], [%s, %s]]
}
''' % (self.ps1.toJson(), self.ps2.toJson(), self.ps3.toJson(), self.ps4.toJson()))
self.assertEqual(upp.total_count, 2)
self.assertEqual(upp.photos[0][0], self.ps1)
class FileTest(unittest.TestCase):
def test_fromJson_all_fields(self):
f = File.fromJson(
'''
{
"file_id": "id123",
"file_size": 123456,
"file_path": "a/file.pdf"
}
'''
)
self.assertEqual(f.file_id, "id123")
self.assertEqual(f.file_size, 123456)
self.assertEqual(f.file_path, "a/file.pdf")
def test_fromJson_without_optional(self):
f = File.fromJson('{"file_id": "id123"}')
self.assertEqual(f.file_id, "id123")
self.assertIsNone(f.file_size)
self.assertIsNone(f.file_path)
class ReplyKeyboardMarkupTest(unittest.TestCase):
def test_fromJson_all_fields(self):
rkm = ReplyKeyboardMarkup.fromJson(
'''{
"keyboard": [["A", "B"], ["C", "D"]],
"resize_keyboard": true,
"one_time_keyboard": true,
"selective": true
}'''
)
self.assertEqual(rkm.keyboard, [['A', 'B'],['C', 'D']])
self.assertTrue(rkm.resize_keyboard)
self.assertTrue(rkm.one_time_keyboard)
self.assertTrue(rkm.selective)
def test_fromJson_without_optional(self):
rkm = ReplyKeyboardMarkup.fromJson('{"keyboard": [["A", "B"], ["C", "D"]]}')
self.assertEqual(rkm.keyboard, [['A', 'B'],['C', 'D']])
self.assertIsNone(rkm.resize_keyboard)
self.assertIsNone(rkm.one_time_keyboard)
self.assertIsNone(rkm.selective)
class ReplyKeyboardHideTest(unittest.TestCase):
def test_fromJson_all_fields(self):
rkh = ReplyKeyboardHide.fromJson(
'''{
"hide_keyboard": true,
"selective": true
}'''
)
self.assertTrue(rkh.hide_keyboard)
self.assertTrue(rkh.selective)
def test_fromJson_without_optional(self):
rkh = ReplyKeyboardHide.fromJson('{"hide_keyboard": true}')
self.assertTrue(rkh.hide_keyboard)
self.assertIsNone(rkh.selective)
    def test_fromJson_hide_keyboard_false(self):
        with self.assertRaises(ValueError):
            ReplyKeyboardHide.fromJson('{"hide_keyboard": false}')
class ForceReplyTest(unittest.TestCase):
def test_fromJson_all_fields(self):
fr = ForceReply.fromJson(
'''{
"force_reply": true,
"selective": true
}'''
)
self.assertTrue(fr.force_reply)
self.assertTrue(fr.selective)
def test_fromJson_without_optional(self):
fr = ForceReply.fromJson('{"force_reply": true}')
self.assertTrue(fr.force_reply)
self.assertIsNone(fr.selective)
    def test_fromJson_force_reply_false(self):
        with self.assertRaises(ValueError):
            ForceReply.fromJson('{"force_reply": false}')
class MessageTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.chat = Chat.fromObject({'id': 1, 'type': 'private'})
def test_init_without_optional(self):
msg1 = Message(message_id=1, date=1234, chat=self.chat)
self.assertEqual(msg1.message_id, 1)
self.assertEqual(msg1.date, 1234)
self.assertEqual(msg1.chat, self.chat)
def test_init_without_optional_other_message(self):
msg1 = Message(message_id=1, date=1234, chat=self.chat)
msg2 = Message(message_id=2, date=1235, chat=self.chat, reply_to_message=msg1)
self.assertEqual(msg2.message_id, 2)
self.assertEqual(msg2.date, 1235)
self.assertEqual(msg2.chat, self.chat)
self.assertEqual(msg2.reply_to_message, msg1)
def test_init_without_optional_always_true_fields(self):
self.assertRaises(ValueError, Message,
message_id=1, date=1234, chat=self.chat,
delete_chat_photo=False)
self.assertRaises(ValueError, Message,
message_id=1, date=1234, chat=self.chat,
group_chat_created=False)
self.assertRaises(ValueError, Message,
message_id=1, date=1234, chat=self.chat,
supergroup_chat_created=False)
self.assertRaises(ValueError, Message,
message_id=1, date=1234, chat=self.chat,
channel_chat_created=False)
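if __name__ == '__main__':
    # Entry point so the test module can be run directly (standard unittest idiom).
    unittest.main()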
|
rikirenz/inspire-next
|
refs/heads/master
|
inspirehep/factory.py
|
3
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""INSPIREHEP app factories."""
from __future__ import absolute_import, division, print_function
import os
import sys
from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory
from invenio_config import create_conf_loader
from . import config
env_prefix = 'APP'
config_loader = create_conf_loader(config=config, env_prefix=env_prefix)
instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
os.path.join(sys.prefix, 'var', 'inspirehep-instance')
"""Instance path for Invenio.
Defaults to ``<env_prefix>_INSTANCE_PATH`` or if environment variable is not
set ``<sys.prefix>/var/<app_name>-instance``.
"""
static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
os.path.join(instance_path, 'static')
"""Static folder path.
Defaults to ``<env_prefix>_STATIC_FOLDER`` or if environment variable is not
set ``<sys.prefix>/var/<app_name>-instance/static``.
"""
create_api = create_app_factory(
'inspirehep',
config_loader=config_loader,
blueprint_entry_points=['invenio_base.api_blueprints'],
extension_entry_points=['invenio_base.api_apps'],
converter_entry_points=['invenio_base.api_converters'],
instance_path=instance_path,
)
create_app = create_app_factory(
'inspirehep',
config_loader=config_loader,
blueprint_entry_points=['invenio_base.blueprints'],
extension_entry_points=['invenio_base.apps'],
converter_entry_points=['invenio_base.converters'],
wsgi_factory=create_wsgi_factory({'/api': create_api}),
instance_path=instance_path,
static_folder=static_folder,
)
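# Usage sketch (illustrative, not part of the original file): each factory
# returns a Flask application when called; ``create_app`` serves the API
# application under the ``/api`` prefix via ``create_wsgi_factory``.
#
#     from inspirehep.factory import create_app
#
#     app = create_app()  # full UI application with the API mounted at /api
#     if __name__ == '__main__':
#         app.run(debug=True)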
|
ClearCorp-dev/account-financial-reporting
|
refs/heads/8.0
|
account_financial_report_horizontal/__openerp__.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2012 Therp BV (<http://therp.nl>),
# Copyright (C) 2013 Agile Business Group sagl
# (<http://www.agilebg.com>) (<[email protected]>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Accounting Financial Reports Horizontal",
"version": "0.3",
"author": "Therp BV,Agile Business Group,Odoo Community Association (OCA)",
"category": 'Accounting & Finance',
'website': 'https://github.com/OCA/account-financial-reporting',
'license': 'AGPL-3',
"depends": ["account"],
'data': [
"data/report_paperformat.xml",
"data/ir_actions_report_xml.xml",
"report/report_financial.xml",
],
'demo': [],
'test': [],
'active': False,
}
|
xbmcmegapack/plugin.video.megapack.dev
|
refs/heads/master
|
resources/lib/menus/home_countries_new_caledonia.py
|
1
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Countries_New_caledonia():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
countries=["New Caledonia"]))
|
lino-framework/extjs6
|
refs/heads/master
|
tests/__init__.py
|
2
|
from lino.utils.pythontest import TestCase
from lino_extjs6 import SETUP_INFO
class PackagesTests(TestCase):
def test_01(self):
self.run_packages_test(SETUP_INFO['packages'])
class ProjectsTests(TestCase):
def test_team(self):
self.run_django_manage_test("lino_extjs6/projects/team6")
def test_lydia(self):
self.run_django_manage_test("lino_extjs6/projects/lydia6")
|
ClaireAuffredou/pipe2py
|
refs/heads/master
|
tests/pypipelines/pipe_e65397e116d7754da0dd23425f1f0af1.py
|
6
|
# Pipe pipe_e65397e116d7754da0dd23425f1f0af1 generated by pipe2py
from pipe2py import Context
from pipe2py.modules.pipeforever import pipe_forever
from pipe2py.modules.pipeurlbuilder import pipe_urlbuilder
from pipe2py.modules.pipefetch import pipe_fetch
from pipe2py.modules.pipeloop import pipe_loop
from pipe2py.modules.piperename import pipe_rename
from pipe2py.modules.piperegex import pipe_regex
from pipe2py.modules.pipeoutput import pipe_output
def pipe_e65397e116d7754da0dd23425f1f0af1(context=None, _INPUT=None, conf=None, **kwargs):
# todo: insert pipeline description here
conf = conf or {}
if context and context.describe_input:
return []
if context and context.describe_dependencies:
return [u'pipefetch', u'pipeloop', u'pipeoutput', u'piperegex', u'piperename', u'pipeurlbuilder']
forever = pipe_forever()
# We need to wrap submodules (used by loops) so we can pass the
# input at runtime (as we can to subpipelines)
def pipe_sw_634(context=None, _INPUT=None, conf=None, **kwargs):
# todo: insert submodule description here
return pipe_urlbuilder(
context, forever, conf={'PATH': {'type': 'text', 'value': ''}, 'BASE': {'type': 'text', 'value': ''}, 'PARAM': [{'value': {'type': 'text', 'value': 'qr'}, 'key': {'type': 'text', 'value': 'cht'}}, {'value': {'type': 'text', 'value': '200x200'}, 'key': {'type': 'text', 'value': 'chs'}}, {'value': {'type': 'text', 'subkey': 'link'}, 'key': {'type': 'text', 'value': 'chl'}}]})
sw_565 = pipe_fetch(
context, forever, conf={'URL': {'type': 'url', 'value': ''}})
sw_626 = pipe_loop(
context, sw_565, embed=pipe_sw_634, conf={'assign_part': {'type': 'text', 'value': 'all'}, 'assign_to': {'type': 'text', 'value': 'media:content.url'}, 'emit_part': {'type': 'text', 'value': 'all'}, 'mode': {'type': 'text', 'value': 'assign'}, 'embed': {'type': 'module', 'value': {'type': 'urlbuilder', 'id': 'sw-634', 'conf': {'PATH': {'type': 'text', 'value': ''}, 'BASE': {'type': 'text', 'value': ''}, 'PARAM': [{'value': {'type': 'text', 'value': 'qr'}, 'key': {'type': 'text', 'value': 'cht'}}, {'value': {'type': 'text', 'value': '200x200'}, 'key': {'type': 'text', 'value': 'chs'}}, {'value': {'type': 'text', 'subkey': 'link'}, 'key': {'type': 'text', 'value': 'chl'}}]}}}, 'with': {'type': 'text', 'value': ''}})
sw_592 = pipe_rename(
context, sw_626, conf={'RULE': [{'field': {'type': 'text', 'value': 'media:content.url'}, 'op': {'type': 'text', 'value': 'copy'}, 'newval': {'type': 'text', 'value': 'description'}}]})
sw_636 = pipe_regex(
context, sw_592, conf={'RULE': [{'field': {'type': 'text', 'value': 'description'}, 'match': {'type': 'text', 'value': '(.*)'}, 'replace': {'type': 'text', 'value': '<img src="$1" alt="QRcode" /><br/>${title}'}}]})
_OUTPUT = pipe_output(
context, sw_636, conf={})
return _OUTPUT
if __name__ == "__main__":
pipeline = pipe_e65397e116d7754da0dd23425f1f0af1(Context())
for i in pipeline:
        print(i)
|
joeythesaint/yocto-autobuilder
|
refs/heads/master
|
lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/internet/_win32stdio.py
|
96
|
# -*- test-case-name: twisted.test.test_stdio -*-
"""
Windows-specific implementation of the L{twisted.internet.stdio} interface.
"""
import win32api
import os, msvcrt
from zope.interface import implements
from twisted.internet.interfaces import IHalfCloseableProtocol, ITransport, IAddress
from twisted.internet.interfaces import IConsumer, IPushProducer
from twisted.internet import _pollingfile, main
from twisted.python.failure import Failure
class Win32PipeAddress(object):
implements(IAddress)
class StandardIO(_pollingfile._PollingTimer):
implements(ITransport,
IConsumer,
IPushProducer)
disconnecting = False
disconnected = False
def __init__(self, proto):
"""
Start talking to standard IO with the given protocol.
        Also, put stdin/stdout/stderr into binary mode.
"""
from twisted.internet import reactor
        for stdfd in (0, 1, 2):
msvcrt.setmode(stdfd, os.O_BINARY)
_pollingfile._PollingTimer.__init__(self, reactor)
self.proto = proto
hstdin = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
hstdout = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
self.stdin = _pollingfile._PollableReadPipe(
hstdin, self.dataReceived, self.readConnectionLost)
self.stdout = _pollingfile._PollableWritePipe(
hstdout, self.writeConnectionLost)
self._addPollableResource(self.stdin)
self._addPollableResource(self.stdout)
self.proto.makeConnection(self)
def dataReceived(self, data):
self.proto.dataReceived(data)
def readConnectionLost(self):
if IHalfCloseableProtocol.providedBy(self.proto):
self.proto.readConnectionLost()
self.checkConnLost()
def writeConnectionLost(self):
if IHalfCloseableProtocol.providedBy(self.proto):
self.proto.writeConnectionLost()
self.checkConnLost()
connsLost = 0
def checkConnLost(self):
self.connsLost += 1
if self.connsLost >= 2:
self.disconnecting = True
self.disconnected = True
self.proto.connectionLost(Failure(main.CONNECTION_DONE))
# ITransport
def write(self, data):
self.stdout.write(data)
def writeSequence(self, seq):
self.stdout.write(''.join(seq))
def loseConnection(self):
self.disconnecting = True
self.stdin.close()
self.stdout.close()
def getPeer(self):
return Win32PipeAddress()
def getHost(self):
return Win32PipeAddress()
# IConsumer
def registerProducer(self, producer, streaming):
return self.stdout.registerProducer(producer, streaming)
def unregisterProducer(self):
return self.stdout.unregisterProducer()
# def write() above
# IProducer
def stopProducing(self):
self.stdin.stopProducing()
# IPushProducer
def pauseProducing(self):
self.stdin.pauseProducing()
def resumeProducing(self):
self.stdin.resumeProducing()
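# Usage sketch (illustrative, not part of Twisted itself): StandardIO wires a
# protocol to the process's stdin/stdout pipes, mirroring the POSIX
# twisted.internet.stdio interface.
#
#     from twisted.internet import protocol, reactor
#
#     class Echo(protocol.Protocol):
#         def dataReceived(self, data):
#             self.transport.write(data)
#
#     StandardIO(Echo())
#     reactor.run()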
|
rempferg/espresso
|
refs/heads/master
|
testsuite/python/tabulated.py
|
4
|
#
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
from __future__ import print_function
import unittest as ut
import espressomd
import numpy as np
from espressomd.interactions import *
from espressomd.analyze import *
import sys
class Tabulated(ut.TestCase):
""" Checks the tabulated non bonded interactions """
epsilon=1e-6
system = espressomd.System()
def setUp(self):
self.system.box_l = [10.0, 10.0, 10.0]
self.system.time_step = 0.01
self.system.non_bonded_inter[0,0].tabulated.set_params(filename="lj1.tab")
self.system.non_bonded_inter[1,1].tabulated.set_params(filename="lj2.tab")
self.system.non_bonded_inter[0,1].tabulated.set_params(filename="lj3.tab")
self.system.part.add(id=0, type=1, pos=[32.6999, 6.14966, 5.34031], f=[172609.00468, 90109.0416958, -61747.0304427])
self.system.part.add(id=1, type=0, pos=[-23.8829, 9.23126, 12.761], f=[194048.535562, -868975.312333, -1019245.73779])
self.system.part.add(id=2, type=1, pos=[2.49633, 6.89473, -21.0496], f=[-66673.5016673, -116413.712678, 73235.9116717])
self.system.part.add(id=3, type=0, pos=[-6.25779, 5.91975, -9.65196], f=[-130347.30713, 89529.0329246, -108882.021846])
self.system.part.add(id=4, type=1, pos=[-3.92171, 34.2882, 0.628891], f=[-52238.8344566, -181492.367762, 76280.000539])
self.system.part.add(id=5, type=0, pos=[-6.39955, 15.3786, 25.894], f=[-7425.60874055, -4505.42966605, 19116.9230427])
self.system.part.add(id=6, type=1, pos=[-0.722762, -2.23058, -15.6541], f=[12506.1474018, -19155.104344, 21369.9662972])
self.system.part.add(id=7, type=1, pos=[-20.9391, 12.2143, 16.7516], f=[125903.444986, 103109.156146, -257503.832784])
self.system.part.add(id=8, type=0, pos=[9.86137, 6.08379, 7.63482], f=[1847.60894613, -1985.50671808, -1909.10150878])
self.system.part.add(id=9, type=0, pos=[-44.3811, 10.9472, -3.26886], f=[-15095.247015, -12842.0288675, -7141.70122153])
self.system.part.add(id=10, type=1, pos=[23.6451, -10.2068, 16.6861], f=[2.37389477315, -3.35215844929, 3.42189532799])
self.system.part.add(id=11, type=1, pos=[-17.9408, -14.6865, -25.3674], f=[26020.6658993, -32430.6362582, -119602.087884])
self.system.part.add(id=12, type=0, pos=[-1.63968, 9.40066, 26.7607], f=[-41419.6546134, -5474.92532854, 300735.00413])
self.system.part.add(id=13, type=1, pos=[28.2716, -17.8088, 8.54688], f=[-5690.30932184, -12493.9110305, 17585.245942])
self.system.part.add(id=14, type=1, pos=[42.8477, 15.7742, -0.662483], f=[73700.5739568, 3319.20263911, 55501.8198917])
self.system.part.add(id=15, type=0, pos=[-1.28645, 4.77404, 6.51004], f=[19519.5595256, -4705.00286595, 345.144361529])
self.system.part.add(id=16, type=1, pos=[11.7026, -6.11787, 2.5266], f=[88588.1367573, -104459.360083, 53375.4248874])
self.system.part.add(id=17, type=0, pos=[-8.49265, 24.7968, 9.66456], f=[-60852.3157448, -726744.614596, -773758.404103])
self.system.part.add(id=18, type=0, pos=[4.66071, 20.7015, 19.5571], f=[17193.5175185, -33844.4625239, 8657.16715595])
self.system.part.add(id=19, type=0, pos=[23.3877, 4.01316, 7.01578], f=[36001.2255447, -7135.83245551, -72914.452808])
self.system.part.add(id=20, type=0, pos=[-3.3036, 24.3823, -10.3294], f=[-52424.3356493, -90234.8967785, 30743.2445911])
self.system.part.add(id=21, type=1, pos=[23.9279, 2.12368, 10.556], f=[824869.88849, 346310.948811, -358914.824381])
self.system.part.add(id=22, type=1, pos=[25.3336, 2.15027, 12.9974], f=[376501.307915, 420465.603537, -208797.553059])
self.system.part.add(id=23, type=1, pos=[9.34445, 9.92401, -20.3987], f=[73428.4288282, 70101.4241105, -27611.5261125])
self.system.part.add(id=24, type=0, pos=[16.9007, 29.3776, 21.9794], f=[-10.8560118382, -0.02632908525, -1.14782854744])
self.system.part.add(id=25, type=0, pos=[-1.88566, 11.78, 6.83349], f=[-658040.419202, 937869.894235, 69234.2227065])
self.system.part.add(id=26, type=1, pos=[22.4545, 1.89461, 15.8406], f=[-116086.888559, 55842.1868688, -68178.5491237])
self.system.part.add(id=27, type=1, pos=[19.352, -16.6605, -10.3743], f=[-1392239.68679, -208063.095013, -144392.689293])
self.system.part.add(id=28, type=0, pos=[-18.7889, -27.0408, 13.4411], f=[1300759.62579, 558083.248751, -523770.751304])
self.system.part.add(id=29, type=0, pos=[2.67754, 33.2706, 19.4872], f=[120433.549718, -813043.668731, 397467.561612])
self.system.part.add(id=30, type=0, pos=[0.730263, 2.43414, 17.7877], f=[15589.1172731, 5275.98064041, -42990.341693])
self.system.part.add(id=31, type=1, pos=[38.7614, -11.1486, 41.2415], f=[246253.048687, 85887.8637986, -142794.825589])
self.system.part.add(id=32, type=1, pos=[21.0854, 4.71322, -16.4078], f=[-31627.2771072, -97588.3102019, -53240.6216801])
self.system.part.add(id=33, type=1, pos=[-8.3059, 3.84164, -5.53546], f=[595025.843309, -203798.084415, -316532.071628])
self.system.part.add(id=34, type=1, pos=[8.69924, -10.7915, -4.19637], f=[48604.9068606, -208904.415945, -425100.434326])
self.system.part.add(id=35, type=1, pos=[18.1181, 6.62936, 20.3124], f=[-10386.0983216, 9160.70189944, -320019.782398])
self.system.part.add(id=36, type=1, pos=[3.973, -12.0568, 17.9474], f=[-6471.17490869, -11486.2546826, -7048.46673507])
self.system.part.add(id=37, type=1, pos=[31.2158, 28.1003, -23.5672], f=[-62149.6264734, 292979.374578, 35491.752507])
self.system.part.add(id=38, type=1, pos=[-2.60289, 3.05326, 0.97681], f=[-18235.6833415, 10787.837609, -2753.19180882])
self.system.part.add(id=39, type=0, pos=[19.8426, 24.9846, 12.774], f=[-10007.6642592, 32965.2158189, -51394.7282553])
self.system.part.add(id=40, type=0, pos=[18.67, -12.5272, -0.126244], f=[-7573.12828027, -5476.18722583, -64500.8217879])
self.system.part.add(id=41, type=1, pos=[7.37841, 14.1114, 31.3792], f=[-161882.082132, -148736.5019, -227912.948758])
self.system.part.add(id=42, type=0, pos=[8.67561, 6.09108, 17.9593], f=[-491025.81778, -614801.416008, -476136.442698])
self.system.part.add(id=43, type=0, pos=[19.8366, -20.7088, 3.97279], f=[-116196.581074, -38895.5417518, 86522.0419231])
self.system.part.add(id=44, type=1, pos=[0.821285, -11.1865, 2.72424], f=[-24492.1535642, -41.7742331054, 6188.7370069])
self.system.part.add(id=45, type=1, pos=[25.9696, 29.8633, 13.4993], f=[74891.4767541, 978900.504558, 1189316.99237])
self.system.part.add(id=46, type=1, pos=[0.622923, -2.95182, 29.6812], f=[4116.07802411, 6313.48772716, 54921.4245201])
self.system.part.add(id=47, type=0, pos=[-19.7758, 5.82526, 21.2401], f=[1205.07700647, 1938.72299675, 3665.70043791])
self.system.part.add(id=48, type=1, pos=[18.3183, 9.69368, 9.32863], f=[89237.5982226, 37564.1524319, -152890.860977])
self.system.part.add(id=49, type=1, pos=[-19.4871, 2.44379, 22.6629], f=[-10892.9635981, -8436.38872703, -13129.943772])
self.system.part.add(id=50, type=0, pos=[-3.90983, 28.1874, 11.4462], f=[494156.49023, -14722.3274365, -394232.94051])
self.system.part.add(id=51, type=0, pos=[22.5021, 35.1942, 41.2124], f=[-164720.38568, -255080.601046, -269213.989108])
self.system.part.add(id=52, type=0, pos=[23.7698, 16.7017, 25.1381], f=[-6015.08147685, -12532.2353915, -33265.5903181])
self.system.part.add(id=53, type=0, pos=[26.1393, 1.28842, 5.94661], f=[-81969.8494125, -11767.0100018, 52633.4081558])
self.system.part.add(id=54, type=0, pos=[2.44402, -1.22219, -8.59332], f=[-25059.9630057, -28893.6620007, -36941.7374539])
self.system.part.add(id=55, type=1, pos=[10.966, -28.2954, -6.10675], f=[-7.51893276524, -526.93148402, -252.911008188])
self.system.part.add(id=56, type=0, pos=[-4.6061, 6.41508, 16.3606], f=[-13358.7432442, 16518.0773394, 61187.4543354])
self.system.part.add(id=57, type=0, pos=[8.26169, -7.71632, -18.8751], f=[-11806.9158773, 18188.1314777, 3929.27617715])
self.system.part.add(id=58, type=0, pos=[21.9765, 35.7343, -11.3207], f=[-36227.9985529, -55304.5784177, -62954.6903597])
self.system.part.add(id=59, type=0, pos=[9.66668, 15.3888, 38.372], f=[-7441.26435483, 99647.2595038, -114379.127188])
self.system.part.add(id=60, type=1, pos=[0.178205, -13.8037, -16.9067], f=[-305278.79365, 202105.743422, 445161.592825])
self.system.part.add(id=61, type=0, pos=[-9.30627, 23.5376, 28.1433], f=[7718.73883263, 14821.232665, -10927.1874525])
self.system.part.add(id=62, type=0, pos=[-11.4102, 19.9895, -26.0798], f=[-10511.6821031, 184592.232844, -211953.669025])
self.system.part.add(id=63, type=0, pos=[1.96605, -6.32699, -24.2179], f=[936.83710797, -815.514376144, 664.10729736])
self.system.part.add(id=64, type=1, pos=[20.8479, 10.4264, -3.73398], f=[340726.952462, 224332.72294, -201441.470561])
self.system.part.add(id=65, type=0, pos=[3.95199, 14.3214, 28.4226], f=[49091.6229689, -32132.6534984, 12167.146391])
self.system.part.add(id=66, type=0, pos=[21.9361, 32.6479, 24.1313], f=[-4311.608664, -9641.79630086, 7800.14016572])
self.system.part.add(id=67, type=0, pos=[21.4614, -7.25158, -13.3158], f=[-484.846140478, -317.801410153, -674.418905396])
self.system.part.add(id=68, type=1, pos=[-12.9327, 38.3009, 34.4506], f=[62.7217895012, -46.8626136096, -59.3269667096])
self.system.part.add(id=69, type=0, pos=[-0.136573, 1.44237, -3.99287], f=[-88942.1103456, -2076.3418768, -54119.954511])
self.system.part.add(id=70, type=0, pos=[19.5677, 37.5006, 17.5663], f=[3296.03524408, -36072.0662483, 11044.1509663])
self.system.part.add(id=71, type=1, pos=[-3.51132, 6.29551, 2.40634], f=[5421.70887205, -10894.3122696, -173.303901693])
self.system.part.add(id=72, type=1, pos=[31.9103, 1.86439, -16.8056], f=[-44423.6291438, 42187.0600003, 51162.2904663])
self.system.part.add(id=73, type=1, pos=[10.9271, 11.8231, -29.5946], f=[-144.342426025, -334.268545232, 235.963541717])
self.system.part.add(id=74, type=1, pos=[1.85393, -3.40053, 4.37367], f=[-46908.4308281, -61375.5987395, -66522.4658835])
self.system.part.add(id=75, type=1, pos=[1.32773, -3.37234, -21.2072], f=[198020.982594, 304381.403858, -140106.565523])
self.system.part.add(id=76, type=1, pos=[10.6974, 4.76438, 27.623], f=[-92253.0055769, -33450.9296363, 8667.38697277])
self.system.part.add(id=77, type=1, pos=[-0.198799, -29.7271, -12.0833], f=[21548.5368432, -2597.64537528, 8281.62303866])
self.system.part.add(id=78, type=0, pos=[0.833598, -8.88612, 7.96833], f=[15679.1694996, -74414.5158585, -106306.882519])
self.system.part.add(id=79, type=0, pos=[-15.6552, -11.6322, -5.03931], f=[-114473.317588, -7234.95048733, 51822.679476])
self.system.part.add(id=80, type=0, pos=[12.3292, 20.6077, 10.597], f=[223572.929042, 52069.3560054, -73338.431435])
self.system.part.add(id=81, type=1, pos=[9.89068, -5.51084, 23.7728], f=[2074.07453854, -24818.569321, 49757.1478338])
self.system.part.add(id=82, type=0, pos=[15.4584, -4.13943, -9.66844], f=[4016.54281022, -9694.36014807, -19415.6604144])
self.system.part.add(id=83, type=1, pos=[32.8647, -14.2026, -8.04338], f=[164470.271778, 271414.065882, 337602.893515])
self.system.part.add(id=84, type=1, pos=[-15.3861, -13.1607, 13.5337], f=[-38.2174782874, 656.41302401, -334.33524204])
self.system.part.add(id=85, type=1, pos=[0.647266, -10.4272, -16.6674], f=[127123.335966, 26339.4104897, -92496.9475158])
self.system.part.add(id=86, type=0, pos=[0.946113, 24.4829, 4.65973], f=[-580189.486663, 456080.501388, 91898.8746231])
self.system.part.add(id=87, type=0, pos=[-6.68799, 1.52127, 26.3686], f=[589071.21444, -149048.196102, -303611.122232])
self.system.part.add(id=88, type=1, pos=[-3.36343, 3.33105, -12.1742], f=[-49595.6992172, -6627.79216572, -43951.9525055])
self.system.part.add(id=89, type=0, pos=[-17.0377, 27.5254, -10.2817], f=[77381.1227604, -86787.7463367, -95743.2668424])
self.system.part.add(id=90, type=1, pos=[-12.7927, -3.50878, -5.52209], f=[112469.501296, 48219.9981041, -15428.9369625])
self.system.part.add(id=91, type=0, pos=[4.22216, 8.04145, -27.6365], f=[49461.2686743, 47324.8790582, 54376.3546942])
self.system.part.add(id=92, type=0, pos=[27.5759, 26.1765, -2.10853], f=[7905.20634567, 2967.75744558, -2196.98888552])
self.system.part.add(id=93, type=1, pos=[-3.07339, 22.0591, 20.4203], f=[2881.66328068, 8469.49219937, 27681.8875588])
self.system.part.add(id=94, type=0, pos=[14.794, 18.8543, 3.91248], f=[-139577.745191, 104288.610797, -238968.513311])
self.system.part.add(id=95, type=0, pos=[13.5918, 9.88955, -25.778], f=[275098.792162, -47859.3717152, 141892.649728])
self.system.part.add(id=96, type=0, pos=[25.9797, 13.4831, 14.1553], f=[-2018.86212338, 1654.38928066, 624.238150809])
self.system.part.add(id=97, type=0, pos=[5.16693, 33.9763, 13.4321], f=[-292109.676807, 131444.743503, 507342.207201])
self.system.part.add(id=98, type=1, pos=[-27.4465, -10.3867, 24.3574], f=[3121.1786896, -2488.53841048, 19265.4761733])
self.system.part.add(id=99, type=0, pos=[15.5114, 15.9482, 14.0686], f=[845889.646147, -688417.978255, -1289692.33631])
self.system.part.add(id=100, type=1, pos=[-9.51218, -30.172, 2.03994], f=[162996.363537, 211535.188006, 66359.352747])
self.system.part.add(id=101, type=1, pos=[3.91474, 20.3536, 15.7574], f=[327595.077012, -167759.275945, 35628.4875937])
self.system.part.add(id=102, type=1, pos=[15.5526, 36.2062, 7.65031], f=[353.779222886, -398.985107565, 1184.12410894])
self.system.part.add(id=103, type=0, pos=[0.965672, 19.8596, 8.86631], f=[41325.6868278, -26607.7558133, 158781.496898])
self.system.part.add(id=104, type=1, pos=[16.5893, -7.56041, -26.2389], f=[-198005.314799, -360595.088131, -510083.457847])
self.system.part.add(id=105, type=1, pos=[-8.49689, 11.2801, 9.26527], f=[-14564.4541616, -546.140209797, -4261.48250269])
self.system.part.add(id=106, type=1, pos=[-12.125, 19.7259, -31.776], f=[60777.9803639, 139734.936406, 72401.0982798])
self.system.part.add(id=107, type=1, pos=[-6.58669, -5.74361, -15.647], f=[-34729.435197, 98387.2740839, 136229.81842])
self.system.part.add(id=108, type=0, pos=[-15.7165, -16.0138, 7.52355], f=[-8865.84849125, -32264.3541126, 4693.58294673])
self.system.part.add(id=109, type=1, pos=[8.63379, 19.3186, 14.7194], f=[11683.4753938, -181403.070404, 216536.406468])
self.system.part.add(id=110, type=1, pos=[10.4293, 3.47129, 21.1444], f=[-15921.7346397, 4287.30479964, 3121.48953315])
self.system.part.add(id=111, type=1, pos=[-7.47532, 31.6808, 6.98578], f=[-474672.218056, 95499.893301, 370549.893526])
self.system.part.add(id=112, type=1, pos=[-25.6404, 6.33415, 5.98913], f=[-32475.6428617, -5388.54055658, 50428.6250506])
self.system.part.add(id=113, type=1, pos=[-9.18993, 15.3198, -19.8304], f=[-537566.093453, 298388.397041, 359823.510801])
self.system.part.add(id=114, type=1, pos=[-2.96488, 6.00711, 0.406486], f=[51717.3127804, -258470.857164, -157496.126838])
self.system.part.add(id=115, type=0, pos=[-5.50418, -29.609, 3.46299], f=[-8976.41893123, -47166.7129837, -87559.7609972])
self.system.part.add(id=116, type=1, pos=[-2.36794, 54.8164, -13.0382], f=[-54751.3554204, 79833.6452591, -8714.88234935])
self.system.part.add(id=117, type=1, pos=[32.3499, 8.50294, -12.8513], f=[28088.4933904, -5513.82421545, -8245.9258622])
self.system.part.add(id=118, type=1, pos=[-24.508, 31.0574, 32.7862], f=[242851.45029, -306252.291771, -33211.3224033])
self.system.part.add(id=119, type=0, pos=[34.7384, -38.5751, 3.37959], f=[-949950.61886, -167825.742554, 318020.063015])
self.system.part.add(id=120, type=1, pos=[4.34161, -4.41151, 18.5127], f=[264.053872407, 928.740405042, 10.6671323767])
self.system.part.add(id=121, type=1, pos=[-32.9134, 1.97272, -13.7444], f=[12626.5215816, -10219.1493308, 23057.1902466])
self.system.part.add(id=122, type=1, pos=[-0.754001, 6.47309, 14.7011], f=[-111742.002427, -3975.68098121, 69630.3513354])
self.system.part.add(id=123, type=0, pos=[-17.6259, 10.8601, 24.8652], f=[34372.5272609, -45961.5661768, -14664.240145])
self.system.part.add(id=124, type=0, pos=[11.8854, 25.5638, 25.6666], f=[-189595.397928, -91228.6184122, 191598.431906])
self.system.part.add(id=125, type=1, pos=[7.05698, -4.82465, -10.9658], f=[-885460.279125, 917114.856906, -373122.862692])
self.system.part.add(id=126, type=0, pos=[-4.84107, -1.66014, -3.7881], f=[91104.5309891, -110468.56248, 22740.9670498])
self.system.part.add(id=127, type=1, pos=[10.1586, 9.09826, 25.1363], f=[-4963.68191006, 3489.81572572, 10221.1139143])
self.system.part.add(id=128, type=1, pos=[-0.222215, 25.0987, 0.305143], f=[134216.113652, 93239.3836534, -3783.24886356])
self.system.part.add(id=129, type=1, pos=[5.66027, -16.2463, 2.57482], f=[305749.038842, -155240.772051, -503417.774935])
self.system.part.add(id=130, type=0, pos=[28.916, -16.7754, 2.67366], f=[-821799.935359, -50531.0620306, -733043.104752])
self.system.part.add(id=131, type=0, pos=[-1.14309, 8.68334, -0.520204], f=[-81829.6707747, 164027.6659, -524794.296815])
self.system.part.add(id=132, type=1, pos=[-24.17, 2.54552, 27.7739], f=[-18774.5666068, 98388.2183558, -53106.4571845])
self.system.part.add(id=133, type=1, pos=[-13.5445, 10.1514, 27.2345], f=[-11214.2670455, -10229.9938142, 16911.4980988])
self.system.part.add(id=134, type=1, pos=[-2.15534, 3.23596, 19.6995], f=[-302375.26654, 426323.051261, -205087.497673])
self.system.part.add(id=135, type=1, pos=[-20.6863, 19.223, 12.7965], f=[25433.2340026, -92939.883816, -3609.51960117])
self.system.part.add(id=136, type=1, pos=[3.74969, 6.21914, 21.5582], f=[35.5767604388, 1118.13686572, 4171.76125022])
self.system.part.add(id=137, type=1, pos=[8.37468, -4.21701, -5.0865], f=[114.417385586, 36.8019441848, -10.5786351728])
self.system.part.add(id=138, type=1, pos=[47.3101, -10.7765, 16.57], f=[-113603.379275, -19137.179892, -22837.1139832])
self.system.part.add(id=139, type=0, pos=[-5.78292, 3.82749, 9.79264], f=[-325802.656979, 494737.918659, -111240.656964])
self.system.part.add(id=140, type=0, pos=[4.56906, -10.2363, 14.6282], f=[-23572.8045964, -19626.9610079, 4078.73868049])
self.system.part.add(id=141, type=0, pos=[8.0594, -22.9628, 18.8739], f=[-5940.37278726, 8946.83739789, -3551.85663108])
self.system.part.add(id=142, type=0, pos=[8.47375, 13.6732, 1.08021], f=[7092.46286623, 3760.31047166, -4544.60481136])
self.system.part.add(id=143, type=0, pos=[11.4906, 0.85363, 32.764], f=[569556.012003, 57158.7773401, 148544.704688])
self.system.part.add(id=144, type=1, pos=[1.99228, -18.1409, 30.7974], f=[-84306.650964, 2510.0684668, -64456.5356776])
self.system.part.add(id=145, type=1, pos=[-15.5443, 31.2741, 5.65632], f=[-23818.6095365, -16076.5197137, 32605.8385225])
self.system.part.add(id=146, type=1, pos=[25.3654, -3.74981, -7.02039], f=[4179.2315355, 17115.5722851, -39275.3548668])
self.system.part.add(id=147, type=0, pos=[21.0065, 13.4246, 26.2033], f=[38738.6189107, 18898.6919169, -11491.3676433])
self.system.part.add(id=148, type=0, pos=[5.27122, 8.38946, -9.724], f=[-796.450734988, 137.629343628, 46.8830032743])
self.system.part.add(id=149, type=1, pos=[2.07177, 37.7938, 30.8016], f=[158.258126165, -4828.3670545, -7174.44621296])
self.system.part.add(id=150, type=0, pos=[23.1038, 4.12193, 16.012], f=[-2.33916003888, 0.896197384391, -8.2705250575])
self.system.part.add(id=151, type=0, pos=[-30.1055, 0.466755, 23.351], f=[-436066.449263, -58166.4081785, -100617.729142])
self.system.part.add(id=152, type=1, pos=[-9.71363, 0.750421, 8.93093], f=[-39005.7408491, 32677.0545167, 19498.8039885])
self.system.part.add(id=153, type=0, pos=[1.8349, 3.99351, 46.9701], f=[-10699.9965232, 15428.0371135, -9483.16998122])
self.system.part.add(id=154, type=1, pos=[16.4225, 22.7599, 26.5832], f=[54238.5170426, 16146.7370145, -1781.26279376])
self.system.part.add(id=155, type=1, pos=[4.13263, 16.2125, 24.1544], f=[-198480.028634, 274102.27509, 227789.579271])
self.system.part.add(id=156, type=0, pos=[-8.59059, 39.138, 14.8296], f=[-306500.538591, -133449.970023, -168613.356828])
self.system.part.add(id=157, type=0, pos=[1.21264, 28.7849, 0.757462], f=[54695.302674, -99610.9566808, -609683.959504])
self.system.part.add(id=158, type=1, pos=[20.5903, 9.99587, 9.97631], f=[79353.6958175, -125815.160959, -264269.262151])
self.system.part.add(id=159, type=1, pos=[1.07782, -2.46876, 0.704081], f=[-9505.18074748, 333.648674141, -31753.6234551])
self.system.part.add(id=160, type=0, pos=[20.4141, 17.0722, -1.41481], f=[-198730.198228, 506494.111184, -524908.205901])
self.system.part.add(id=161, type=0, pos=[26.3415, -7.53124, 2.45962], f=[10657.9381234, 3711.11524989, -4913.50188024])
self.system.part.add(id=162, type=0, pos=[31.068, -5.33086, 22.1746], f=[-86694.9267585, 144319.242153, 62110.867572])
self.system.part.add(id=163, type=1, pos=[2.90542, 14.1131, 17.9978], f=[-62218.2751412, 3908.89993204, 61565.9697819])
self.system.part.add(id=164, type=0, pos=[23.6408, 19.6548, -0.555014], f=[17769.8473232, 11431.0115143, -3894.62141472])
self.system.part.add(id=165, type=1, pos=[13.8822, -1.23203, -2.75511], f=[14074.3286159, 37624.4957182, 15991.1205614])
self.system.part.add(id=166, type=0, pos=[-2.7656, 30.5664, 9.00829], f=[-16291.0827041, -6182.14594511, -4163.48306493])
self.system.part.add(id=167, type=1, pos=[-23.6137, 1.76972, -22.9781], f=[19080.920396, 17458.3924231, 14045.8457605])
self.system.part.add(id=168, type=1, pos=[32.7545, -11.1446, 8.03269], f=[34467.3133421, -41893.5477479, -46761.9932339])
self.system.part.add(id=169, type=0, pos=[4.67527, 3.046, -2.33888], f=[-2776.2067977, 1400.7979632, -433.124488731])
self.system.part.add(id=170, type=1, pos=[-5.60794, 9.07545, -3.95086], f=[-98351.1601356, 95392.3520574, -19467.9607498])
self.system.part.add(id=171, type=1, pos=[19.2931, 28.5405, -22.7765], f=[-9871.69062551, 38758.5220915, -12678.3333139])
self.system.part.add(id=172, type=0, pos=[-2.94387, 4.33293, 8.41991], f=[1478.01503062, -234345.26268, -171421.457041])
self.system.part.add(id=173, type=0, pos=[-1.22187, 18.4656, -6.40994], f=[6633.95058063, 11398.3661614, -6545.71150327])
self.system.part.add(id=174, type=0, pos=[21.3272, 26.1586, -9.27202], f=[12361.9680665, 27268.7597068, 22087.1694731])
self.system.part.add(id=175, type=0, pos=[31.8616, 4.10457, 10.39], f=[192188.164038, -32802.7516998, 7676.46467202])
self.system.part.add(id=176, type=1, pos=[-0.223968, -5.32098, -0.831345], f=[136345.022177, 71683.7713873, 335063.558685])
self.system.part.add(id=177, type=0, pos=[-3.12085, 5.57438, 23.888], f=[15596.1896781, -37845.5284647, 20594.5847185])
self.system.part.add(id=178, type=0, pos=[17.4926, 18.8434, -2.23507], f=[-166687.891724, -143105.361675, -126778.918925])
self.system.part.add(id=179, type=1, pos=[-12.3708, 29.4583, -5.07754], f=[468310.082716, 3105.59933404, -164164.921306])
self.system.part.add(id=180, type=0, pos=[-27.906, 10.4461, 6.34728], f=[1387495.84087, -1980940.78896, 750472.648989])
self.system.part.add(id=181, type=1, pos=[-22.4067, 28.6025, 15.5707], f=[1346.94359569, -1184.01323713, 414.020752306])
self.system.part.add(id=182, type=1, pos=[-9.44938, 4.03549, -7.08772], f=[-29042.6885009, -53053.989688, 52333.3304139])
self.system.part.add(id=183, type=1, pos=[13.2814, 2.50232, 5.58722], f=[-152.217353732, 2993.97734175, -2328.86136957])
self.system.part.add(id=184, type=1, pos=[-19.5498, -3.82472, 49.069], f=[-17561.5142809, -452436.017412, 221689.857997])
self.system.part.add(id=185, type=0, pos=[-19.7632, 8.56354, -19.6358], f=[4473.27935694, 2987.74396774, -2539.74882969])
self.system.part.add(id=186, type=1, pos=[0.0874139, -7.13767, -13.4772], f=[-25440.7307568, -171841.936303, -40670.8767558])
self.system.part.add(id=187, type=0, pos=[28.4694, 22.2221, 3.06984], f=[-82100.6861296, -152807.493501, -399816.912837])
self.system.part.add(id=188, type=0, pos=[21.399, -2.59167, 31.7917], f=[64132.1506303, 155865.350946, -136287.148645])
self.system.part.add(id=189, type=1, pos=[19.6205, 27.2998, 19.2357], f=[-163468.589388, 46783.4874263, 137288.568067])
self.system.part.add(id=190, type=1, pos=[-1.22958, 7.65032, 32.7295], f=[189513.047991, 131037.417945, -177334.063394])
self.system.part.add(id=191, type=0, pos=[4.73174, 13.6923, -3.44309], f=[-14024.0850271, 21446.5282381, 106993.986997])
self.system.part.add(id=192, type=0, pos=[-21.0219, -29.755, 22.948], f=[-2419.96871018, 141404.025457, 17856.5737251])
self.system.part.add(id=193, type=1, pos=[-24.0523, -2.75459, -21.2883], f=[-90216.6052501, -289529.337871, -135437.868085])
self.system.part.add(id=194, type=0, pos=[-6.05277, 29.0204, 18.4026], f=[1345.30025017, -29373.0246105, 30452.0852325])
self.system.part.add(id=195, type=1, pos=[29.6669, 6.62547, 10.2761], f=[2804.1063629, 1498.72208064, -9811.29058924])
self.system.part.add(id=196, type=1, pos=[13.8616, -14.7066, 14.7599], f=[-32462.5445951, 3063.4378207, -52256.2247881])
self.system.part.add(id=197, type=0, pos=[13.4033, 40.7943, 8.72951], f=[25655.5362207, 249901.952952, 166032.114672])
self.system.part.add(id=198, type=1, pos=[13.2453, 27.1712, 0.723936], f=[16377.4477928, -23218.8020152, 69412.5669709])
self.system.part.add(id=199, type=0, pos=[-38.5987, -3.75107, 47.8266], f=[164698.957653, -487527.301991, 175971.764388])
self.system.part.add(id=200, type=1, pos=[26.0792, 18.1285, -22.9965], f=[375994.426593, 422135.264289, 191860.620768])
self.system.part.add(id=201, type=0, pos=[5.13931, 14.1044, -1.31902], f=[-27101.1244086, 24838.9152064, -133326.719346])
self.system.part.add(id=202, type=1, pos=[8.34896, 24.6854, 4.66302], f=[12021.8048414, 18621.5745536, -51794.2662549])
self.system.part.add(id=203, type=1, pos=[27.5075, -8.13471, -5.89235], f=[152145.945086, 225700.818429, 64577.044111])
self.system.part.add(id=204, type=0, pos=[20.079, 11.0004, 24.6306], f=[-64105.9100022, 125797.786895, -97674.9835382])
self.system.part.add(id=205, type=0, pos=[-8.38593, 12.9526, -5.00555], f=[-26395.6016399, -273045.569282, 167394.463557])
self.system.part.add(id=206, type=0, pos=[-13.729, -15.5658, 23.5712], f=[-83441.5259386, -171409.153175, 125767.739572])
self.system.part.add(id=207, type=1, pos=[28.2817, 11.0168, 19.2674], f=[-17091.678551, 42139.349389, 36400.8619194])
self.system.part.add(id=208, type=0, pos=[13.6304, -26.8779, -21.6353], f=[-61863.6285203, 182429.05685, -18749.9535966])
self.system.part.add(id=209, type=0, pos=[-11.737, -14.2424, -13.0626], f=[27346.2293081, 37527.0279588, 12871.835422])
self.system.part.add(id=210, type=0, pos=[20.7668, -1.4249, 14.1964], f=[4736.42871829, -1210.58830488, -1562.51228168])
self.system.part.add(id=211, type=1, pos=[26.5376, 27.6911, 13.3257], f=[-70972.1193862, -172210.899708, 191191.983833])
self.system.part.add(id=212, type=1, pos=[-8.81282, -0.945807, -28.2678], f=[-91889.1983233, 191757.313025, 539430.164036])
self.system.part.add(id=213, type=1, pos=[23.007, -1.69326, -9.55568], f=[6768.75868336, 117411.697742, 104543.722507])
self.system.part.add(id=214, type=1, pos=[3.28699, 9.92412, -1.83124], f=[-50804.7328021, -226405.574109, -169838.714863])
self.system.part.add(id=215, type=1, pos=[-6.4398, -3.69166, -6.66101], f=[-727.6784606, 535.310103832, -147.385354794])
self.system.part.add(id=216, type=0, pos=[2.18018, 9.48908, 8.73084], f=[-47312.3996048, 49502.3004704, 29441.1475727])
self.system.part.add(id=217, type=1, pos=[36.4798, 38.0181, 5.15765], f=[48313.2977876, 30275.0668463, 11273.0086697])
self.system.part.add(id=218, type=0, pos=[21.7302, 29.201, 16.3088], f=[-7896.74216109, -7473.49967707, -12136.3880219])
self.system.part.add(id=219, type=1, pos=[3.40345, 2.06122, 17.5394], f=[-131239.981356, -110879.164441, 146923.859348])
self.system.part.add(id=220, type=0, pos=[10.2974, 47.5883, -13.5401], f=[-203423.976165, -139000.74492, -59898.0045339])
self.system.part.add(id=221, type=1, pos=[20.186, 15.303, -15.8004], f=[-6753.92445093, -4230.14730454, -4587.30094863])
self.system.part.add(id=222, type=0, pos=[5.33247, -12.8054, 15.4006], f=[35930.9197489, 102106.473365, 69250.699426])
self.system.part.add(id=223, type=1, pos=[12.8802, 0.231196, 3.5283], f=[-163567.633721, 77678.343926, -158887.831488])
self.system.part.add(id=224, type=0, pos=[-2.5267, -26.9339, 22.5802], f=[72147.3199523, -70449.4719221, 5544.86985723])
self.system.part.add(id=225, type=0, pos=[7.73989, -11.6712, -0.126099], f=[2738.35270799, -50761.6862309, -73222.1745293])
self.system.part.add(id=226, type=1, pos=[44.1566, -8.30003, -0.62461], f=[87185.6740898, -7379.04281058, -37979.5083656])
self.system.part.add(id=227, type=1, pos=[8.93566, 19.6858, 7.62401], f=[100817.163014, -76427.3995833, 102503.282652])
self.system.part.add(id=228, type=0, pos=[16.2284, -14.7406, 10.2218], f=[-15766.9826509, 125474.489106, -77803.9354664])
self.system.part.add(id=229, type=1, pos=[-7.04009, 17.1097, 5.89688], f=[799085.223093, -188140.367352, 749267.762679])
self.system.part.add(id=230, type=0, pos=[25.4163, 1.65227, 35.2032], f=[64360.4621418, 3215.60928331, -2674.17155178])
self.system.part.add(id=231, type=1, pos=[7.85975, -8.33836, -22.1744], f=[-8599.12500528, 149066.038472, 453010.135208])
self.system.part.add(id=232, type=0, pos=[-11.0211, -28.746, 33.9259], f=[-1897.12278157, -4442.61136336, -9152.84085435])
self.system.part.add(id=233, type=1, pos=[-11.6944, 9.35147, 12.9068], f=[-35548.7639642, -38888.7296503, -10319.8926257])
self.system.part.add(id=234, type=1, pos=[-7.59205, 33.6735, 13.7742], f=[-219874.094675, 136846.955604, 391711.57238])
self.system.part.add(id=235, type=0, pos=[16.9355, -1.56197, -7.29217], f=[98554.0815453, 184882.08448, -154007.353702])
self.system.part.add(id=236, type=1, pos=[2.25575, -4.7257, 20.1252], f=[630485.601727, 413747.202855, 341317.523151])
self.system.part.add(id=237, type=0, pos=[15.025, 4.98649, 9.85821], f=[-18203.2599974, 78724.6467208, 8857.37607488])
self.system.part.add(id=238, type=1, pos=[4.1663, 55.9964, -12.5882], f=[18602.8490008, 21103.9134027, 6427.44561638])
self.system.part.add(id=239, type=0, pos=[-29.6778, 3.46219, -0.293918], f=[1152599.78626, -154010.141559, -178474.899234])
self.system.part.add(id=240, type=1, pos=[9.22559, 31.5867, -1.62517], f=[-100802.757446, -174802.110838, 200279.0771])
self.system.part.add(id=241, type=0, pos=[15.1418, -10.4736, 2.97578], f=[-278294.512009, -113258.844974, -175986.563969])
self.system.part.add(id=242, type=0, pos=[17.058, -2.52854, -1.36913], f=[36009.4875747, 7436.48579622, -1898.2810172])
self.system.part.add(id=243, type=1, pos=[21.29, 7.73121, 28.713], f=[58902.7404681, 43128.5693611, 8617.14660669])
self.system.part.add(id=244, type=0, pos=[8.04927, 18.4757, -28.1098], f=[-347473.694167, 86772.2353552, 535361.744907])
self.system.part.add(id=245, type=0, pos=[29.5998, -17.7623, 7.64968], f=[235425.092939, 182303.973658, 22091.9097663])
self.system.part.add(id=246, type=1, pos=[-0.987863, 11.4071, 25.1309], f=[3513.70902731, 4021.44741173, 5510.4900067])
self.system.part.add(id=247, type=1, pos=[-9.06954, 11.3071, 11.4038], f=[5268.39437184, 7665.72936154, 4418.19536781])
self.system.part.add(id=248, type=0, pos=[1.91847, 19.7806, 9.64636], f=[161727.489457, 545146.463547, -29039.1577357])
self.system.part.add(id=249, type=1, pos=[-8.35956, 0.0667881, 11.9653], f=[5253.95283875, -27745.7792488, -28831.7948391])
self.system.part.add(id=250, type=1, pos=[0.487446, 0.216921, -24.7753], f=[66795.5983579, -129392.319368, 97417.4799718])
self.system.part.add(id=251, type=0, pos=[-1.32048, 35.0078, -9.6084], f=[-81798.7512647, -87946.9887898, -44846.4545817])
self.system.part.add(id=252, type=0, pos=[8.09979, 4.32332, 15.6889], f=[21056.3882373, -100730.428199, 27155.2679953])
self.system.part.add(id=253, type=0, pos=[7.38974, 11.9896, -17.0046], f=[-5734.24618771, -16060.7541932, -68843.2670803])
self.system.part.add(id=254, type=1, pos=[-14.4854, 11.1328, -9.33585], f=[25533.0582186, -194603.698516, -28963.1725391])
self.system.part.add(id=255, type=0, pos=[28.3348, -46.6827, 35.9808], f=[91220.9825979, -11914.1811302, -10285.5058093])
self.system.part.add(id=256, type=1, pos=[7.26847, 2.20901, 8.84062], f=[290678.96438, 351389.857049, -259745.178826])
self.system.part.add(id=257, type=1, pos=[-7.12085, -12.5296, 41.946], f=[779.536302848, 745.441700483, 616.383800664])
self.system.part.add(id=258, type=0, pos=[-14.6772, 22.17, 0.731227], f=[-107706.826926, 176311.31789, 39411.9155093])
self.system.part.add(id=259, type=1, pos=[5.03154, 6.33535, 14.823], f=[-896589.354985, 588387.315716, 1246292.73422])
self.system.part.add(id=260, type=0, pos=[6.66531, 9.45172, 5.2604], f=[-562045.409724, -257397.306656, 302568.165387])
self.system.part.add(id=261, type=0, pos=[22.0649, 17.5148, 26.312], f=[139402.101873, -126302.879678, -30870.4296256])
self.system.part.add(id=262, type=1, pos=[14.5992, 10.8936, 4.42445], f=[155598.243911, -7704.21934303, 38147.8878215])
self.system.part.add(id=263, type=1, pos=[-24.9766, -2.14758, 14.2537], f=[149430.236573, -156939.36269, 87887.6206494])
self.system.part.add(id=264, type=0, pos=[-11.341, 20.734, -2.36689], f=[360.849501993, -11710.2457094, 541360.828776])
self.system.part.add(id=265, type=1, pos=[37.8317, 15.3555, 5.91795], f=[-26585.8557645, 90822.7275781, 7202.21172967])
self.system.part.add(id=266, type=0, pos=[2.11403, 12.7257, 12.4375], f=[-281885.457701, -90382.8559818, 238212.132245])
self.system.part.add(id=267, type=0, pos=[20.1597, 16.5056, -5.86826], f=[114562.623608, 43571.9493337, 40338.2475928])
self.system.part.add(id=268, type=0, pos=[1.32697, -6.80647, 0.388346], f=[24112.8258782, -37648.825121, -1368.99280021])
self.system.part.add(id=269, type=0, pos=[19.3903, 4.08523, 8.40683], f=[367530.978255, 910004.376711, -346921.68909])
self.system.part.add(id=270, type=1, pos=[41.9324, -0.559761, 23.3367], f=[9241.88935091, 109938.645364, 151715.548551])
self.system.part.add(id=271, type=1, pos=[9.88502, 5.77538, -14.3992], f=[-18206.9501359, 1759.26632485, 7686.00171995])
self.system.part.add(id=272, type=1, pos=[-6.69898, 18.6133, -17.5642], f=[272646.558003, 97109.1107779, -640959.775235])
self.system.part.add(id=273, type=1, pos=[-14.1101, 25.3543, 25.5114], f=[29226.8404809, 14872.7879474, -36409.2263885])
self.system.part.add(id=274, type=1, pos=[-18.3439, 31.0623, -12.9845], f=[-302994.857485, 418712.118205, 431757.072161])
self.system.part.add(id=275, type=0, pos=[24.2161, -10.0633, 10.3326], f=[5559.18030796, 1424.6288843, -1727.95513734])
self.system.part.add(id=276, type=1, pos=[27.7054, 8.85837, 3.78302], f=[-21285.5017442, 7834.06737852, 4127.04211873])
self.system.part.add(id=277, type=1, pos=[11.2489, 4.73301, 1.12555], f=[10394.0660847, -1296.11055649, -164354.03048])
self.system.part.add(id=278, type=0, pos=[22.999, 10.822, 25.6585], f=[-324000.299339, 160908.505068, -36521.1509323])
self.system.part.add(id=279, type=1, pos=[20.0984, 7.47846, 21.0774], f=[20108.4989246, 24796.8004047, -13072.4385271])
self.system.part.add(id=280, type=0, pos=[-24.7071, 10.7715, -12.232], f=[1734046.04072, -482751.891085, -1168036.29264])
self.system.part.add(id=281, type=1, pos=[13.005, -4.13413, 4.21103], f=[-46.4592621129, -57.1129929678, -61.6328497454])
self.system.part.add(id=282, type=1, pos=[5.32996, 23.94, 9.72681], f=[96860.810285, -94903.9560632, 119805.455483])
self.system.part.add(id=283, type=0, pos=[12.6258, -18.8574, -0.403599], f=[129486.448111, -145493.925925, -39696.9247785])
self.system.part.add(id=284, type=1, pos=[21.1657, -3.13347, -2.94105], f=[-152829.864303, 397304.569582, -451951.021149])
self.system.part.add(id=285, type=1, pos=[16.9634, 11.3226, 35.2329], f=[791.909136547, 45984.1345276, 10405.249916])
self.system.part.add(id=286, type=1, pos=[1.80774, 3.22237, 18.6318], f=[192077.973738, -138009.476699, -134968.06012])
self.system.part.add(id=287, type=0, pos=[-12.8805, -13.1627, -18.9711], f=[21364.4720209, 245175.153491, 185725.369628])
self.system.part.add(id=288, type=0, pos=[-4.91328, -15.2753, 12.3476], f=[72374.584109, 148611.850025, 426162.160716])
self.system.part.add(id=289, type=0, pos=[19.4424, 5.30228, 17.015], f=[-2383.52945197, 153.411365338, 763.93932165])
self.system.part.add(id=290, type=1, pos=[23.5326, 21.2132, 13.76], f=[-61737.2523539, -45353.8696814, -351994.603377])
self.system.part.add(id=291, type=0, pos=[-15.9254, 8.02975, 13.7683], f=[-7474.81060352, 99439.4743799, 52369.774661])
self.system.part.add(id=292, type=1, pos=[8.77804, -19.7785, -24.7348], f=[4940.57284102, -11095.5499563, -54003.1698257])
self.system.part.add(id=293, type=1, pos=[-11.0482, -9.46797, -19.6454], f=[-914.876757168, 9257.01140839, -1095.20214657])
self.system.part.add(id=294, type=1, pos=[-16.1757, 4.78398, -20.4276], f=[-81726.2285351, 189261.375095, -40577.5938991])
self.system.part.add(id=295, type=1, pos=[23.2319, 7.63347, 14.8446], f=[331230.055368, -12782.8830981, -168476.024481])
self.system.part.add(id=296, type=1, pos=[-7.91514, 18.084, 19.3609], f=[-63212.3291657, 40221.6464025, -25738.074593])
self.system.part.add(id=297, type=1, pos=[-10.8636, 2.85694, 11.6977], f=[40708.3684066, -22567.9738496, -173791.126607])
self.system.part.add(id=298, type=0, pos=[7.94237, 27.1333, 17.8495], f=[2499.09987105, -164.153956036, 1095.08773678])
self.system.part.add(id=299, type=0, pos=[-39.901, -1.68615, 18.6173], f=[-109.372626152, 2274.24541351, -964.02877604])
self.system.part.add(id=300, type=0, pos=[-11.9578, 6.423, 21.3234], f=[44143.1674187, 887670.026953, 490126.951251])
self.system.part.add(id=301, type=0, pos=[8.11667, -3.92853, -27.5054], f=[-206909.591102, 159936.553623, 13443.5620746])
self.system.part.add(id=302, type=1, pos=[12.7103, 4.18886, 31.8423], f=[-149936.127119, -105704.750768, 38734.6088894])
self.system.part.add(id=303, type=0, pos=[2.89015, 33.4006, 2.91344], f=[216092.64091, -113389.259984, -394527.82677])
self.system.part.add(id=304, type=1, pos=[10.9898, 4.56687, -24.2688], f=[5097.86882148, 12608.7868618, 135208.863876])
self.system.part.add(id=305, type=0, pos=[26.7415, -0.0124172, -19.5371], f=[423731.080534, 535831.415897, 681221.524538])
self.system.part.add(id=306, type=1, pos=[-16.0261, 12.1251, -1.52792], f=[-395638.110323, -676974.742697, -118308.77589])
self.system.part.add(id=307, type=1, pos=[22.4778, -3.66117, 30.3185], f=[41555.4393138, -24441.3954031, -99759.5531849])
self.system.part.add(id=308, type=0, pos=[1.56766, -4.31244, -6.42737], f=[33345.1578199, 101276.862127, 7594.9251329])
self.system.part.add(id=309, type=1, pos=[-9.34157, -14.768, -3.37169], f=[-18999.4998714, -15088.897609, -805.859152471])
self.system.part.add(id=310, type=0, pos=[-8.12854, 12.2901, 17.8688], f=[-62898.8476857, -99290.8711373, 38620.0173253])
self.system.part.add(id=311, type=0, pos=[0.271517, 22.9569, 24.8564], f=[74141.8002108, 101672.675084, 92266.5421552])
self.system.part.add(id=312, type=0, pos=[-1.70059, 23.9174, -1.93373], f=[-52488.957608, 55080.643627, -33150.8340226])
self.system.part.add(id=313, type=1, pos=[8.3439, -12.3577, -3.66346], f=[-0.168052368041, 0.0590614534735, -0.0203304929785])
self.system.part.add(id=314, type=1, pos=[8.5742, 24.6412, 13.6058], f=[47004.4830121, 48070.1032676, 29004.3010486])
self.system.part.add(id=315, type=0, pos=[-15.5886, 2.68378, 22.5946], f=[-25464.2072097, 12130.3098164, 6061.34124545])
self.system.part.add(id=316, type=0, pos=[-47.8756, -23.4025, 11.3119], f=[-31229.3390781, 25311.9582273, 83638.9245573])
self.system.part.add(id=317, type=0, pos=[25.687, 44.0353, -3.21308], f=[-66258.5423071, 8351.31579581, 6630.14584447])
self.system.part.add(id=318, type=1, pos=[-7.27703, 19.4779, 22.22], f=[248966.853692, 234205.327418, -28184.0412103])
self.system.part.add(id=319, type=1, pos=[34.2794, 19.9522, 27.7506], f=[2426.6347506, 9997.97715896, -7532.71536888])
self.system.part.add(id=320, type=1, pos=[-1.68596, 26.0317, 9.4843], f=[-461832.807937, 138104.627025, 42299.3395431])
self.system.part.add(id=321, type=0, pos=[-8.4018, 25.9001, -13.3804], f=[19207.1947717, -14330.4351431, -16222.5961508])
self.system.part.add(id=322, type=0, pos=[34.6457, 12.8489, 48.6769], f=[395887.701691, 554517.62011, 61444.9726936])
self.system.part.add(id=323, type=0, pos=[-19.0386, -12.9167, 15.63], f=[72920.6514787, -5180.47789705, 22376.1575985])
self.system.part.add(id=324, type=0, pos=[-7.67313, -12.7886, -14.8602], f=[-905141.474807, 71559.2566984, -654270.260837])
self.system.part.add(id=325, type=1, pos=[-42.1065, 20.0133, 10.5374], f=[254675.557079, 215011.720425, 413252.322271])
self.system.part.add(id=326, type=1, pos=[7.85882, -9.83641, -13.8482], f=[-39756.9739766, 58193.1073736, -46449.8897941])
self.system.part.add(id=327, type=1, pos=[17.7812, 18.9758, 10.7564], f=[-36785.6331824, 36834.9724548, 90417.0335526])
self.system.part.add(id=328, type=0, pos=[-1.07174, 0.350308, -11.3479], f=[3602.87628621, -1535.76243078, -106032.334094])
self.system.part.add(id=329, type=1, pos=[13.3291, 25.2511, 28.2701], f=[-27395.4876737, 38703.4396355, 43021.3310208])
self.system.part.add(id=330, type=1, pos=[-5.65484, -2.91928, 1.88109], f=[999.034795293, -117933.631902, -36701.5355173])
self.system.part.add(id=331, type=0, pos=[-3.76892, 18.1284, 19.1779], f=[58007.7718012, 281882.230492, 54488.5805645])
self.system.part.add(id=332, type=0, pos=[19.8696, -0.978259, -8.2067], f=[-147122.786591, -200953.628028, -77957.4879106])
self.system.part.add(id=333, type=0, pos=[0.316131, 10.4155, 10.887], f=[-389177.932998, 132790.499972, 364601.617299])
self.system.part.add(id=334, type=0, pos=[16.3898, 16.8903, -19.7831], f=[-37516.3238532, 45742.4914466, -10857.9217686])
self.system.part.add(id=335, type=0, pos=[19.9042, 28.9257, 19.4875], f=[13176.7087196, -24110.0770056, -3113.26781302])
self.system.part.add(id=336, type=0, pos=[4.14909, -15.6467, 15.4463], f=[-402604.322827, -81212.5506167, -129850.233018])
self.system.part.add(id=337, type=0, pos=[-15.3921, 17.6944, -23.1293], f=[2970.94560481, -283.687361462, -3211.9755691])
self.system.part.add(id=338, type=0, pos=[-2.51628, -6.63483, 18.5411], f=[48467.3210391, 10481.9814221, 33021.9075003])
self.system.part.add(id=339, type=0, pos=[3.19186, 12.0895, -0.326107], f=[-116227.890014, 41827.6530265, 10127.9429285])
self.system.part.add(id=340, type=1, pos=[7.17579, 14.6012, 2.95932], f=[-688629.345609, 720526.832369, -208850.135697])
self.system.part.add(id=341, type=1, pos=[16.7636, 13.8178, 26.8868], f=[-122517.525471, -2549.8228951, -15519.2751994])
self.system.part.add(id=342, type=1, pos=[6.97664, 9.52169, 3.53827], f=[12316.4229811, -62495.6233809, 88072.081659])
self.system.part.add(id=343, type=0, pos=[20.9549, 15.6723, 25.1484], f=[17169.7343359, 3320.77559522, -6538.82588156])
self.system.part.add(id=344, type=0, pos=[0.533194, 27.4352, -5.75287], f=[504581.437285, -367492.642748, 614637.339096])
self.system.part.add(id=345, type=0, pos=[-13.4279, 2.52396, 5.40281], f=[-15820.7933988, 17454.5506358, -33800.9771954])
self.system.part.add(id=346, type=0, pos=[-10.9643, -6.13645, -14.5351], f=[-79512.3364187, -155298.405639, 30135.9227917])
self.system.part.add(id=347, type=1, pos=[2.07633, -4.78464, 12.6415], f=[18119.0665009, -7712.83297066, -15289.6269358])
self.system.part.add(id=348, type=0, pos=[-25.065, 13.4621, -5.58194], f=[18811.8389902, -24186.8813873, -12260.8709225])
self.system.part.add(id=349, type=1, pos=[20.5237, 20.7999, 22.458], f=[-529599.822374, -20988.5023144, -198522.235651])
self.system.part.add(id=350, type=1, pos=[-18.183, 17.6236, 17.3922], f=[-41181.7808857, 3244.47660203, 31251.7610209])
self.system.part.add(id=351, type=1, pos=[8.99255, 23.1866, -11.4973], f=[-272429.251764, -1011610.02952, 480196.771778])
self.system.part.add(id=352, type=1, pos=[-2.05616, -18.7755, 15.9484], f=[-31613.037539, -43244.5567568, 55549.6536462])
self.system.part.add(id=353, type=1, pos=[-2.74523, 17.4506, 15.0548], f=[456.180231377, 79.0446597149, 185.753856165])
self.system.part.add(id=354, type=1, pos=[-6.37235, -2.90565, 7.47273], f=[61977.0829392, -32137.675754, -25395.8292902])
self.system.part.add(id=355, type=1, pos=[-15.5279, 5.41772, -9.06838], f=[127538.852035, -91061.7382711, 109071.558467])
self.system.part.add(id=356, type=0, pos=[25.3237, -11.9966, -6.82038], f=[10239.3913099, -2007.33823426, 4585.81731527])
self.system.part.add(id=357, type=1, pos=[25.6056, 1.08582, 3.83722], f=[272880.288421, -145784.305926, 134124.365797])
self.system.part.add(id=358, type=1, pos=[13.0481, 4.62343, -19.6275], f=[13703.0836879, -12186.8088907, -19228.241143])
self.system.part.add(id=359, type=1, pos=[31.7489, 1.75658, 25.0869], f=[434035.194361, 158866.577629, -133531.313457])
self.system.part.add(id=360, type=0, pos=[4.93794, 7.72619, -18.8734], f=[-2154.11175497, -4580.89972713, -53083.3344172])
self.system.part.add(id=361, type=1, pos=[2.55317, -5.84126, 29.0504], f=[-41368.5811504, 809857.57283, -352953.878257])
self.system.part.add(id=362, type=1, pos=[-5.51764, 2.06697, -5.30596], f=[49173.7250526, 113843.024728, 1571.55505067])
self.system.part.add(id=363, type=0, pos=[27.9355, 10.9121, 4.94288], f=[77395.1334365, -62139.0317007, -249463.562579])
self.system.part.add(id=364, type=0, pos=[38.0845, -22.8538, -36.6678], f=[-284532.867889, -108597.733955, 121907.369618])
self.system.part.add(id=365, type=0, pos=[5.25071, 1.57145, -8.08758], f=[-69661.5225665, 144933.951358, -254681.742887])
self.system.part.add(id=366, type=1, pos=[-5.18666, 29.753, 28.6484], f=[-32445.141122, 69926.9005615, -44097.9618832])
self.system.part.add(id=367, type=0, pos=[-19.9623, 23.7312, 7.15746], f=[-6756.75635019, 105027.72239, 77021.1124197])
self.system.part.add(id=368, type=1, pos=[17.4057, -11.844, 16.736], f=[-322.356448863, -2686.15597778, -3984.21463105])
self.system.part.add(id=369, type=0, pos=[29.285, 5.72952, -20.5642], f=[540324.452872, -102078.39818, -596.846350509])
self.system.part.add(id=370, type=0, pos=[-3.83269, 8.68623, 16.0081], f=[34329.2471466, -198.131563349, 15529.0681631])
self.system.part.add(id=371, type=1, pos=[-33.6956, 9.39888, -10.2114], f=[-505817.734598, -563914.722607, -389356.587742])
self.system.part.add(id=372, type=1, pos=[-17.5739, 33.1382, -2.50818], f=[70096.4110789, 78916.5449178, -30874.0384892])
self.system.part.add(id=373, type=0, pos=[36.5357, 24.9728, 41.4357], f=[3582.74664871, 23172.1578332, 18669.4097444])
self.system.part.add(id=374, type=0, pos=[16.9954, 5.33989, 15.0416], f=[4615.45711424, 24586.0682807, 5613.69638571])
self.system.part.add(id=375, type=1, pos=[1.82334, 7.08591, 19.9433], f=[43700.0784101, 106294.552757, 30046.5379008])
self.system.part.add(id=376, type=0, pos=[13.0631, 42.3133, 2.65241], f=[-7279.48024105, -48082.1141698, -15064.4285264])
self.system.part.add(id=377, type=1, pos=[14.1583, 23.6694, 1.89258], f=[592.894775157, 820.523838976, -2249.92688651])
self.system.part.add(id=378, type=0, pos=[-0.35301, 9.62877, 20.8785], f=[5884.6572214, 334.54595679, 903.221744571])
self.system.part.add(id=379, type=0, pos=[6.67499, 5.20424, 26.3819], f=[266434.093426, -581671.982115, -996293.891106])
self.system.part.add(id=380, type=1, pos=[25.2999, -21.789, -7.9224], f=[-465258.329093, 81069.5777949, 359876.747496])
self.system.part.add(id=381, type=0, pos=[16.0707, 14.5449, 8.78741], f=[-8701.79958014, -5573.80598606, -2732.06560018])
self.system.part.add(id=382, type=0, pos=[-25.8306, 27.0075, -50.1218], f=[2315.31639344, -396.390181723, -2040.26686864])
self.system.part.add(id=383, type=0, pos=[-8.60183, -3.87077, 19.6745], f=[62924.1245455, -359785.709668, 471082.568389])
self.system.part.add(id=384, type=1, pos=[19.9619, -2.14535, 23.5428], f=[-502660.753334, 365889.478594, -616466.879706])
self.system.part.add(id=385, type=0, pos=[-11.917, 25.0486, -11.9888], f=[-252841.487364, 13692.6243924, -380884.674871])
self.system.part.add(id=386, type=1, pos=[17.6316, 42.7367, 26.7442], f=[-100260.641148, -50713.7110736, -47507.852387])
self.system.part.add(id=387, type=1, pos=[-16.5078, 28.1644, 41.4039], f=[-12799.5774151, -1592.63817009, 10915.5790055])
self.system.part.add(id=388, type=0, pos=[2.40707, 7.63493, -25.9164], f=[-98416.3018031, 67976.5133864, -32383.8630576])
self.system.part.add(id=389, type=0, pos=[11.3639, 2.55183, 29.29], f=[-321680.940885, 162955.406554, 34657.7013554])
self.system.part.add(id=390, type=0, pos=[-12.5677, 30.7672, 17.4674], f=[-78288.3865436, -181844.032266, -73372.387592])
self.system.part.add(id=391, type=0, pos=[11.019, -23.0012, -6.70126], f=[2185.24956457, 15847.3152984, 15616.7800604])
self.system.part.add(id=392, type=1, pos=[-4.68606, 28.8649, 4.82749], f=[197825.116518, 29776.8092684, 194665.584048])
self.system.part.add(id=393, type=0, pos=[3.62642, 3.64346, 3.50788], f=[95426.7747516, 133305.407313, -174294.905297])
self.system.part.add(id=394, type=1, pos=[18.701, -7.43215, -5.99042], f=[95402.7472941, 143373.87079, 387719.704107])
self.system.part.add(id=395, type=0, pos=[-0.918984, 14.3861, -7.71807], f=[-69714.6766965, 2325.95057819, 59656.3688397])
self.system.part.add(id=396, type=0, pos=[-5.1385, 11.8359, -12.3905], f=[4962.35281625, 17872.6162777, -21529.0742789])
self.system.part.add(id=397, type=1, pos=[-34.6372, -40.5613, 25.8628], f=[-42698.5500802, 58502.6591514, -13546.623495])
self.system.part.add(id=398, type=1, pos=[-11.1098, -4.07139, -9.06733], f=[-20436.6408737, -270760.045318, -221611.353537])
self.system.part.add(id=399, type=0, pos=[-19.408, -0.602715, -3.59942], f=[-10950.1426195, -101096.607992, -84884.8335726])
self.system.part.add(id=400, type=1, pos=[23.0766, -0.355263, 0.687203], f=[-5923.80805124, -18251.7762592, 315.982007073])
self.system.part.add(id=401, type=1, pos=[22.8645, 20.7687, 1.53809], f=[-1344471.45332, 1115941.1681, -158312.630525])
self.system.part.add(id=402, type=1, pos=[5.72787, 39.9511, 14.4921], f=[13339.1898212, 2852.47977543, -2485.03727824])
self.system.part.add(id=403, type=0, pos=[-0.81178, -5.67944, 7.37332], f=[-10.6582398182, -0.696123053823, -0.00325471160703])
self.system.part.add(id=404, type=0, pos=[7.79782, -0.0438411, -8.22903], f=[-855095.741221, -511155.30489, 355875.160454])
self.system.part.add(id=405, type=0, pos=[-37.4238, 1.16415, 32.6662], f=[44661.7520812, -45904.5136667, -32550.6355932])
self.system.part.add(id=406, type=0, pos=[34.235, -11.9125, 10.5171], f=[18975.3517119, -1844.64270364, -17265.7857491])
self.system.part.add(id=407, type=1, pos=[30.7658, 11.8295, 8.77407], f=[-92895.7722952, -33881.6072116, 38582.0179338])
self.system.part.add(id=408, type=1, pos=[19.9135, -15.6501, 21.5744], f=[69409.2651814, -3513.21557656, -60134.2206779])
self.system.part.add(id=409, type=1, pos=[29.574, 2.16048, 29.5709], f=[1703.70949271, -553.933784607, -767.745478369])
self.system.part.add(id=410, type=1, pos=[31.9819, 11.9493, -10.1563], f=[149948.63715, -120821.414054, 287492.388316])
self.system.part.add(id=411, type=1, pos=[-3.71502, 5.29894, 12.9606], f=[-16681.496585, 155478.063976, -146493.741388])
self.system.part.add(id=412, type=1, pos=[20.2454, -17.408, -9.58198], f=[-7430.0122484, -36661.4555396, 28114.012798])
self.system.part.add(id=413, type=1, pos=[-12.2415, -14.5315, -8.75518], f=[-251354.266942, -842186.935718, -71665.9694396])
self.system.part.add(id=414, type=0, pos=[18.0658, 1.20393, 13.3932], f=[38588.9912102, -18145.5060944, 8551.5081197])
self.system.part.add(id=415, type=1, pos=[19.9133, 47.1448, -14.7023], f=[-75786.8169704, 3042.87958746, -20874.9523635])
self.system.part.add(id=416, type=1, pos=[-4.98195, 23.2353, 10.9409], f=[-11762.8858906, 41169.4830253, 8132.7193765])
self.system.part.add(id=417, type=0, pos=[32.7212, -2.61271, -1.97336], f=[2819.72349085, 80999.955603, -69569.9823528])
self.system.part.add(id=418, type=1, pos=[-14.9407, 24.5064, 15.8982], f=[495169.250935, -191847.532312, 274085.573474])
self.system.part.add(id=419, type=1, pos=[-0.370994, 11.0288, 12.138], f=[114634.624253, -340243.460463, -89445.6577944])
self.system.part.add(id=420, type=1, pos=[24.7918, 23.006, 19.818], f=[458738.822709, -648743.354097, 41219.8067326])
self.system.part.add(id=421, type=0, pos=[12.2324, 10.8853, 18.0148], f=[-81481.7328728, 22983.9150517, 74994.3319967])
self.system.part.add(id=422, type=1, pos=[-9.63005, 2.60135, 3.79172], f=[-1285441.45978, -562539.308989, 495438.318098])
self.system.part.add(id=423, type=1, pos=[8.67506, 25.5567, -8.22007], f=[-10244.3934164, -261203.838925, -397322.001959])
self.system.part.add(id=424, type=0, pos=[27.8241, -5.3103, -0.57812], f=[1343169.59562, 427658.923732, -2513304.53874])
self.system.part.add(id=425, type=0, pos=[-15.5693, 2.79493, 3.76479], f=[158891.233171, 33656.8393647, 3189.94676725])
self.system.part.add(id=426, type=1, pos=[-4.74511, 2.17108, -0.728546], f=[50075.9506856, -62528.133547, 61181.749071])
self.system.part.add(id=427, type=1, pos=[6.78528, 17.2248, 27.3346], f=[-2758.1580956, 201.880806605, -1887.72294416])
self.system.part.add(id=428, type=0, pos=[5.41781, 23.7927, 7.778], f=[-1904.78438599, 1870.50633386, -209.562530744])
self.system.part.add(id=429, type=0, pos=[-27.1897, -11.444, 23.3116], f=[-363023.379898, -40166.9733598, 648772.476571])
self.system.part.add(id=430, type=1, pos=[0.721097, 22.6138, -8.29598], f=[-239731.672951, -170143.561325, 63704.0568416])
self.system.part.add(id=431, type=1, pos=[-9.14098, -4.92886, 48.7759], f=[-28482.55279, 15212.5069863, -26657.9210307])
self.system.part.add(id=432, type=0, pos=[15.5035, -16.5838, 25.9115], f=[79867.1129408, 22225.5418003, 3056.77310978])
self.system.part.add(id=433, type=0, pos=[31.2859, -14.321, 21.7618], f=[-8322.57895104, 7940.9282434, 2679.22206354])
self.system.part.add(id=434, type=0, pos=[13.3687, -9.24494, -9.64525], f=[3551.26805761, 10350.3650979, -5076.08659636])
self.system.part.add(id=435, type=1, pos=[1.42919, -1.11248, 39.6929], f=[-231600.685597, -449365.581371, -69037.1473738])
self.system.part.add(id=436, type=1, pos=[7.26355, 13.5482, 6.01046], f=[-104737.77398, 5144.30106208, -14422.4482978])
self.system.part.add(id=437, type=0, pos=[15.5378, -2.61664, 16.5859], f=[-343370.294598, -472423.897941, -264832.980026])
self.system.part.add(id=438, type=1, pos=[10.3619, -1.05464, 37.3556], f=[-579934.340977, 106309.138829, -37395.8330258])
self.system.part.add(id=439, type=1, pos=[37.1155, 31.809, -8.4016], f=[-300.169276722, -765.138941032, -395.293746518])
self.system.part.add(id=440, type=1, pos=[4.03636, 13.1279, 25.0957], f=[-13252.7762994, -7073.58469897, 9319.45886376])
self.system.part.add(id=441, type=0, pos=[2.73306, 28.5467, -5.46719], f=[-35207.6019812, 3621.8986015, -18517.3499795])
self.system.part.add(id=442, type=0, pos=[16.1892, -3.69667, 24.8044], f=[-83198.1720846, -13703.1726585, -5776.57261601])
self.system.part.add(id=443, type=0, pos=[-12.6562, 29.6716, -10.2506], f=[-319335.659743, -141817.998052, -332121.691525])
self.system.part.add(id=444, type=1, pos=[-20.6156, -9.07022, 9.51543], f=[14902.9009195, 18932.0763092, 28206.9514831])
self.system.part.add(id=445, type=0, pos=[-36.4144, -5.24311, 11.6416], f=[149316.340645, 99679.7290012, -34645.8440396])
self.system.part.add(id=446, type=1, pos=[26.0792, 21.8337, 10.0011], f=[-331700.256771, 150621.887836, 237763.782273])
self.system.part.add(id=447, type=0, pos=[-0.681588, 15.9295, 32.5118], f=[59730.9718097, 66470.3358774, 122193.174955])
self.system.part.add(id=448, type=1, pos=[48.4635, 13.4953, -15.4497], f=[-16891.8412425, -10923.5443149, -27340.2010334])
self.system.part.add(id=449, type=0, pos=[5.95144, 37.3585, 12.4261], f=[-4952.86824306, -35161.6461147, -24468.1205676])
self.system.part.add(id=450, type=0, pos=[14.86, 7.39436, -0.724951], f=[-4660.17510101, 823.474923273, 2616.68587604])
self.system.part.add(id=451, type=0, pos=[29.1326, 20.0451, -3.70222], f=[136080.815175, 62306.8076497, 122129.253081])
self.system.part.add(id=452, type=0, pos=[-6.44948, -12.0999, 46.6197], f=[-13099.3117824, -24473.230839, -37032.5187097])
self.system.part.add(id=453, type=0, pos=[-0.787449, 14.1766, 0.36943], f=[-64988.879062, -65297.7761163, 35018.5931316])
self.system.part.add(id=454, type=0, pos=[35.7854, 5.00133, 44.444], f=[18275.6628638, -3508.40350419, -41998.8393095])
self.system.part.add(id=455, type=1, pos=[15.3581, -3.91337, 28.685], f=[176195.770164, -134945.127273, -415653.933411])
self.system.part.add(id=456, type=0, pos=[-3.15312, 1.48484, -0.573851], f=[180215.594205, -529066.492005, -31008.7150906])
self.system.part.add(id=457, type=1, pos=[-3.29893, 13.822, 12.4555], f=[-79692.9879287, 77165.753708, -40302.9478819])
self.system.part.add(id=458, type=1, pos=[-15.2886, 34.5822, 14.5128], f=[331.81673669, 29195.7801971, -47447.5090764])
self.system.part.add(id=459, type=1, pos=[-9.1355, -19.3829, -26.3464], f=[272397.905287, 129776.100492, 94618.3149323])
self.system.part.add(id=460, type=1, pos=[-13.886, 0.472531, 19.6791], f=[-26934.729863, 30792.1220637, -48056.689134])
self.system.part.add(id=461, type=1, pos=[-4.14837, -26.8611, 18.9785], f=[-45148.3981119, 8031.13705147, -33699.2657512])
self.system.part.add(id=462, type=0, pos=[35.9996, 27.2453, 34.5419], f=[-24853.6669223, -63691.774776, -41988.0130773])
self.system.part.add(id=463, type=0, pos=[8.12899, 12.2741, 15.3574], f=[-1106.51360065, 8671.92488291, -2862.79263427])
self.system.part.add(id=464, type=1, pos=[-20.5306, -16.1592, -25.612], f=[6766.17962799, 1141.17266119, -21173.1588201])
self.system.part.add(id=465, type=0, pos=[7.81445, -26.2477, 7.00682], f=[256899.097168, 38682.6539982, 91019.1339103])
self.system.part.add(id=466, type=0, pos=[-13.1114, -7.00921, 4.55311], f=[182036.48396, 361909.771543, 528434.480923])
self.system.part.add(id=467, type=0, pos=[13.6581, 13.0613, -22.8111], f=[25776.685139, 90393.525096, -32420.7195382])
self.system.part.add(id=468, type=1, pos=[0.731612, 18.1677, -18.5018], f=[-183740.577106, 95251.2464941, 8113.25764991])
self.system.part.add(id=469, type=1, pos=[2.86373, -27.0131, 1.78031], f=[287991.498759, 147780.641893, -301541.57882])
self.system.part.add(id=470, type=1, pos=[26.0484, 4.38345, 5.63318], f=[14602.484419, 24628.3216086, -12061.3372804])
self.system.part.add(id=471, type=0, pos=[-14.6628, -20.9057, 17.409], f=[-1015.8699092, 1479.15886529, 596.173189892])
self.system.part.add(id=472, type=0, pos=[6.37026, -11.1698, 27.8465], f=[9655.71795619, 23155.5653768, 27961.9366783])
self.system.part.add(id=473, type=1, pos=[13.124, -9.3699, 7.45203], f=[90663.4010148, -26323.5856306, -58376.3247593])
self.system.part.add(id=474, type=1, pos=[15.4635, 16.5177, 11.293], f=[415.466619071, 10572.3109383, 17070.9022577])
self.system.part.add(id=475, type=0, pos=[16.6037, 1.11661, 18.1862], f=[61.3419027553, 52.4388846718, 847.4470994])
self.system.part.add(id=476, type=0, pos=[-8.35625, 4.22599, 8.44338], f=[-123448.461953, 348831.974023, -116434.922867])
self.system.part.add(id=477, type=0, pos=[5.84946, -9.98721, 2.12309], f=[533.824913842, -1625.78070183, -1413.93576566])
self.system.part.add(id=478, type=1, pos=[7.6953, 0.927625, 2.36443], f=[40627.3957397, 67360.350236, -49538.277109])
self.system.part.add(id=479, type=0, pos=[3.614, 20.1627, -18.3327], f=[1390361.88427, -1115450.78318, 241560.535146])
self.system.part.add(id=480, type=1, pos=[10.0858, 22.713, 38.6951], f=[-26159.6012799, -7482.95629493, 34470.8617829])
self.system.part.add(id=481, type=1, pos=[-6.62086, 2.63495, 3.66959], f=[-193837.899644, -248567.805867, 86942.5029001])
self.system.part.add(id=482, type=0, pos=[-5.37556, -4.46687, 13.5613], f=[197693.466549, -294105.760938, -241652.678305])
self.system.part.add(id=483, type=0, pos=[-21.2751, 19.3341, 0.263397], f=[-25054.5629593, 194899.721279, 5850.88464789])
self.system.part.add(id=484, type=1, pos=[4.31085, 2.67273, 11.4199], f=[-1499.32842013, 2801.28027497, -15255.4558408])
self.system.part.add(id=485, type=1, pos=[10.813, 24.1835, -19.766], f=[-17151.3889024, 360675.062033, 289872.521782])
self.system.part.add(id=486, type=0, pos=[23.0983, -15.3739, 33.1195], f=[2368.40240459, -324.750659196, -97.4640564601])
self.system.part.add(id=487, type=1, pos=[26.3883, 26.289, 5.92522], f=[88375.3927259, -3528.27187387, -5619.77714169])
self.system.part.add(id=488, type=1, pos=[-13.4154, -1.88384, 10.2086], f=[27549.5568974, 2173.23119217, 94711.2680476])
self.system.part.add(id=489, type=1, pos=[27.0924, 26.9394, 6.09175], f=[-0.046689540432, -0.373854802518, -0.323516903851])
self.system.part.add(id=490, type=0, pos=[-5.60108, 9.51396, -27.8059], f=[110.880120795, 100.701338548, -28.0786446345])
self.system.part.add(id=491, type=0, pos=[-0.0711613, -2.10658, -7.72058], f=[1605247.43442, -312633.129266, 1725922.13238])
self.system.part.add(id=492, type=0, pos=[1.5837, -9.94894, 4.21], f=[-40983.6423457, 5010.35045904, 98024.956066])
self.system.part.add(id=493, type=0, pos=[14.0725, -5.04008, 32.6928], f=[-10.7202580926, 6.99810114045, 28.2709331886])
self.system.part.add(id=494, type=1, pos=[21.6236, 6.59296, -7.62256], f=[103301.847764, -312342.307167, 250908.549565])
self.system.part.add(id=495, type=1, pos=[16.9848, 20.3233, 34.7887], f=[219366.8315, -542754.127861, 519313.566779])
self.system.part.add(id=496, type=0, pos=[-4.38621, -4.72923, 6.57385], f=[-36149.8924372, -4897.0884601, 65511.2790487])
self.system.part.add(id=497, type=0, pos=[-2.08958, 24.5887, 32.1344], f=[208109.22093, 140145.095743, 179034.64802])
self.system.part.add(id=498, type=0, pos=[4.8845, 17.2248, -1.70277], f=[38133.3832608, 44438.9584195, -22858.4056466])
self.system.part.add(id=499, type=0, pos=[16.4905, 20.12, 16.0676], f=[-76.3689329003, 763.361068076, -19188.6723771])
self.system.part.add(id=500, type=1, pos=[-5.61473, 0.677861, -2.94091], f=[-20941.7302511, -2783.3291711, -14900.8261328])
self.system.part.add(id=501, type=0, pos=[10.9471, 28.1139, 15.2034], f=[1381.64092797, -2314.14831914, -3238.42771624])
self.system.part.add(id=502, type=0, pos=[26.9334, -9.79043, 2.69692], f=[-59548.2228531, 12738.6090492, -56639.4061343])
self.system.part.add(id=503, type=0, pos=[1.94696, 8.83478, -27.5241], f=[-131120.270743, -350866.882814, -43781.1055603])
self.system.part.add(id=504, type=1, pos=[17.9352, 1.39162, 10.368], f=[-3575.75615258, -9264.5338282, -3813.56456115])
self.system.part.add(id=505, type=0, pos=[-8.06977, 7.98535, -36.9275], f=[128.263728071, -186.562213232, 51.4800231937])
self.system.part.add(id=506, type=1, pos=[-9.25449, 27.7317, 17.4527], f=[11229.1036941, -39662.6021962, 63838.7501524])
self.system.part.add(id=507, type=1, pos=[5.78556, 0.106931, 8.71538], f=[-42757.3274639, 52063.6101367, -2965.1502642])
self.system.part.add(id=508, type=1, pos=[-9.0765, -0.222421, -22.1939], f=[-11513.058278, -1196.91357487, -173780.203385])
self.system.part.add(id=509, type=0, pos=[-16.8338, 8.25018, 15.7144], f=[-10300.2490307, 94632.7889183, 133916.123983])
self.system.part.add(id=510, type=1, pos=[-7.74373, -0.495665, -4.70655], f=[313810.255451, 137904.258013, 157305.449681])
self.system.part.add(id=511, type=1, pos=[-10.0257, 3.23297, 2.27256], f=[37587.2621297, 395.410193423, -14199.3195391])
self.system.part.add(id=512, type=1, pos=[-0.894466, -13.1217, -16.3853], f=[81752.5639607, -20955.1412871, 21659.456902])
self.system.part.add(id=513, type=1, pos=[-1.22637, -1.70874, 15.0646], f=[-0.117807588582, -8.09360173851, 69.3625094134])
self.system.part.add(id=514, type=1, pos=[-10.7919, -3.24196, -11.5246], f=[487620.738228, 611490.025483, 480860.575943])
self.system.part.add(id=515, type=0, pos=[4.78847, -0.964608, 10.9806], f=[-37.0143859987, 60.1219807127, -72.6572380898])
self.system.part.add(id=516, type=1, pos=[-15.482, 0.991917, -1.69862], f=[-1716727.40688, 466723.668367, 1194518.43442])
self.system.part.add(id=517, type=0, pos=[-0.688694, 31.9809, 2.38321], f=[-108142.579988, 305731.329489, 115199.387751])
self.system.part.add(id=518, type=0, pos=[26.7374, 12.9698, 9.62709], f=[13724.965183, -7032.8449552, 34707.6196532])
self.system.part.add(id=519, type=1, pos=[29.5335, 4.79558, 15.5648], f=[73613.8661045, 210776.7568, -4277.80150252])
self.system.part.add(id=520, type=1, pos=[-0.975577, 48.3502, 10.4248], f=[205259.77763, -406938.606011, 622883.031058])
self.system.part.add(id=521, type=0, pos=[-23.4618, 29.2964, -11.231], f=[99262.9103663, -72735.1830103, -230453.779877])
self.system.part.add(id=522, type=1, pos=[12.6893, 9.0265, -0.329318], f=[82257.8171907, -108560.830621, 23523.349894])
self.system.part.add(id=523, type=0, pos=[-2.73528, -3.67112, -30.7565], f=[-48261.8407682, 28151.3788544, -15351.9535394])
self.system.part.add(id=524, type=1, pos=[-23.5013, 25.7466, 28.2362], f=[-7645.59442967, -2819.40067021, 2022.85461027])
self.system.part.add(id=525, type=1, pos=[13.591, -6.39236, 20.625], f=[-103383.854922, -36322.7047744, 137516.133054])
self.system.part.add(id=526, type=1, pos=[-17.5074, 12.9774, 30.6448], f=[8499.72038733, -4647.91016543, 8734.43131088])
self.system.part.add(id=527, type=1, pos=[-3.84318, -6.26911, -28.5044], f=[-488.613892051, -1597.29759405, 74.9106188577])
self.system.part.add(id=528, type=1, pos=[-13.0563, 4.34363, -5.52071], f=[23015.539596, -20065.1495617, 27696.2228826])
self.system.part.add(id=529, type=0, pos=[15.8341, 19.2309, 1.42873], f=[463.657124932, 884.726474976, -570.831760995])
self.system.part.add(id=530, type=0, pos=[16.9428, 33.6052, 3.55647], f=[-19939.8452882, 4756.53351716, 16157.3483418])
self.system.part.add(id=531, type=1, pos=[4.01827, -3.42838, -1.52657], f=[-63641.8915974, -47994.3344477, 12669.5387627])
self.system.part.add(id=532, type=1, pos=[38.3054, 27.8912, -18.92], f=[96816.4080113, -219236.92912, -304328.701135])
self.system.part.add(id=533, type=0, pos=[8.57537, -7.1674, 37.6287], f=[-157048.113415, -112928.19886, -335425.031925])
self.system.part.add(id=534, type=0, pos=[0.804599, 21.5071, 25.3841], f=[-908528.887019, 88644.2145222, -105002.497401])
self.system.part.add(id=535, type=1, pos=[4.73834, -3.93679, 11.8489], f=[-11.7524846559, 99.892464586, -43.9516765978])
self.system.part.add(id=536, type=1, pos=[8.38832, 5.98932, 13.516], f=[51701.5290397, -23160.7083143, 189347.736083])
self.system.part.add(id=537, type=0, pos=[-3.17043, -8.93144, 24.1512], f=[-336519.345004, 561767.940429, -702149.629401])
self.system.part.add(id=538, type=0, pos=[3.98126, 21.4658, 11.6601], f=[368.588634741, -2979.65433683, 2420.61583084])
self.system.part.add(id=539, type=1, pos=[6.55026, 26.6019, 3.46024], f=[-5562.30398587, 19585.3896269, -8798.30142546])
self.system.part.add(id=540, type=1, pos=[-5.38646, 2.79602, 5.95052], f=[-90916.4843827, -114648.553129, -101589.287968])
self.system.part.add(id=541, type=1, pos=[-11.1073, 31.3432, 11.0437], f=[20596.8835901, -30720.2616252, -2665.19696276])
self.system.part.add(id=542, type=1, pos=[-16.7853, 16.8975, 4.05033], f=[64006.000344, -59858.3710018, -7352.25350474])
self.system.part.add(id=543, type=1, pos=[19.2724, -1.98208, 21.5867], f=[-1627202.54965, 296087.459005, -1713287.44642])
self.system.part.add(id=544, type=1, pos=[23.4088, -0.66687, 5.32616], f=[3176.51825857, 14395.6765959, -724.334579196])
self.system.part.add(id=545, type=0, pos=[13.4265, -4.84288, -12.8438], f=[-28417.2515593, -24671.7723309, -57756.1175871])
self.system.part.add(id=546, type=0, pos=[-3.46973, 31.108, 3.05], f=[147174.869517, 7227.21380937, 31165.490752])
self.system.part.add(id=547, type=0, pos=[-0.0821683, -26.1713, 5.97847], f=[9431.05520458, -2869.02394666, -4554.40936193])
self.system.part.add(id=548, type=1, pos=[1.30985, -29.6225, 0.582482], f=[27667.3925862, -71205.9383829, -93542.2091541])
self.system.part.add(id=549, type=0, pos=[21.5514, 13.2031, 11.4834], f=[246110.035491, 166411.5634, -62666.2743888])
self.system.part.add(id=550, type=0, pos=[10.3845, 16.5408, -27.8597], f=[69152.7654511, 120494.250639, -334448.222144])
self.system.part.add(id=551, type=0, pos=[-8.28935, -4.84754, -12.475], f=[93058.170596, 35487.9514189, -8740.31014256])
self.system.part.add(id=552, type=0, pos=[35.5385, 12.1062, 4.31989], f=[40437.0559535, 37224.5875459, 3925.33903065])
self.system.part.add(id=553, type=1, pos=[-0.528858, 16.7777, 26.5076], f=[-16796.5878377, -17180.8273947, -1168.32782917])
self.system.part.add(id=554, type=1, pos=[-11.3957, 0.945531, 16.64], f=[534683.44857, -845665.029378, -756961.971268])
self.system.part.add(id=555, type=0, pos=[10.8882, 25.5222, 12.8017], f=[334029.149506, -305713.669699, -160171.623362])
self.system.part.add(id=556, type=1, pos=[-1.35742, 5.02778, -11.1326], f=[430427.634949, 36956.0359961, 222499.153345])
self.system.part.add(id=557, type=1, pos=[26.3316, 12.4681, 11.1585], f=[32978.448699, 9749.36363618, 12333.3473075])
self.system.part.add(id=558, type=1, pos=[-10.3761, 2.20258, 34.4384], f=[-75333.257308, -84943.151016, -48216.2058522])
self.system.part.add(id=559, type=0, pos=[-15.9597, 9.30416, -6.74832], f=[984.565632145, 864.516672928, 1042.44637639])
self.system.part.add(id=560, type=0, pos=[4.73959, -4.61488, 5.46095], f=[-104314.641274, 296080.255884, -135506.975987])
self.system.part.add(id=561, type=1, pos=[-0.321168, 8.50724, 26.2196], f=[-22533.7943523, 29882.4549259, -8475.692398])
self.system.part.add(id=562, type=1, pos=[-2.35207, 34.378, 20.3019], f=[-607917.01772, -1114002.19345, 3150964.25683])
self.system.part.add(id=563, type=0, pos=[15.4751, -10.1832, 10.4265], f=[-40716.7427674, 7599.72576054, 34299.5047818])
self.system.part.add(id=564, type=0, pos=[-11.565, 42.5, 30.0747], f=[335582.872422, -416136.618707, 214012.41817])
self.system.part.add(id=565, type=0, pos=[17.4963, -2.73451, 11.934], f=[-537.041296168, -397.038099413, 580.567525136])
self.system.part.add(id=566, type=0, pos=[-5.0233, -13.6238, -10.4104], f=[-175412.970652, 133259.924797, 416318.285427])
self.system.part.add(id=567, type=1, pos=[-1.72105, 7.05162, 14.4728], f=[3692.93598319, -1796.3085125, 21661.1759251])
self.system.part.add(id=568, type=1, pos=[-0.455731, 43.3159, 3.43069], f=[749251.472751, 108886.713525, 902801.848806])
self.system.part.add(id=569, type=0, pos=[-2.55408, -1.40829, 8.82917], f=[-20800.9981288, -6454.01162037, 11367.6470174])
self.system.part.add(id=570, type=0, pos=[17.8645, -16.0832, 3.17078], f=[655768.3762, -743171.443044, 185671.44561])
self.system.part.add(id=571, type=0, pos=[14.1322, -7.79982, -3.22981], f=[87709.9404572, -5399.86980565, -78756.3259062])
self.system.part.add(id=572, type=1, pos=[14.586, -4.92644, 7.36388], f=[22522.8812545, 31087.9661929, -1404.82487508])
self.system.part.add(id=573, type=0, pos=[12.4169, 25.0823, -3.49218], f=[-355.321571297, -1711.98412225, -719.084465816])
self.system.part.add(id=574, type=1, pos=[-17.3399, 26.2522, 17.0015], f=[12989.9502409, 6627.84271454, 5267.67518978])
self.system.part.add(id=575, type=1, pos=[8.43289, 18.8736, 8.29628], f=[184157.723274, 414061.995607, -181239.725202])
self.system.part.add(id=576, type=1, pos=[-2.04445, -23.4408, 15.6832], f=[-7.53855974517, 18.909731445, -29.5145652576])
self.system.part.add(id=577, type=0, pos=[2.68949, 1.94822, -1.36305], f=[-1952.90957292, -304.32617811, 986.560564078])
self.system.part.add(id=578, type=1, pos=[-8.1431, -0.232726, 17.3082], f=[2192.11042208, 9174.11793883, 28196.946931])
self.system.part.add(id=579, type=1, pos=[23.4328, 27.5271, 13.08], f=[-105969.989808, -89853.6587918, -100047.283123])
self.system.part.add(id=580, type=0, pos=[2.36644, 41.9396, 11.8427], f=[10049.6525191, -14256.9672057, 57078.9068609])
self.system.part.add(id=581, type=1, pos=[8.60727, 20.4134, 11.424], f=[857218.368201, 484472.250307, -367433.934444])
self.system.part.add(id=582, type=0, pos=[24.8102, 10.2426, 21.2611], f=[-1243.57938913, -1571.59905197, 1053.88233439])
self.system.part.add(id=583, type=0, pos=[-14.4274, -1.74955, 17.9685], f=[-39683.8746159, 10595.5533946, 73856.2360285])
self.system.part.add(id=584, type=0, pos=[5.41668, -10.9445, 9.23773], f=[-16666.0758777, -86599.5485483, 10704.1934281])
self.system.part.add(id=585, type=0, pos=[-11.6949, 7.98835, 8.79446], f=[-58383.2858161, -404328.07732, 227558.558214])
self.system.part.add(id=586, type=1, pos=[6.45267, 25.6837, -12.7838], f=[-240326.232937, 591924.485372, 1027520.29285])
self.system.part.add(id=587, type=0, pos=[-16.9133, 21.7649, -19.0497], f=[-740214.741475, -336932.081593, 379580.930376])
self.system.part.add(id=588, type=1, pos=[21.6308, -8.8838, 15.8314], f=[-644875.09658, 1361842.98543, -965626.219512])
self.system.part.add(id=589, type=1, pos=[27.7168, 22.7955, 33.748], f=[32809.8025293, -3472.53543405, 31439.6913332])
self.system.part.add(id=590, type=0, pos=[16.2575, 20.649, 21.4141], f=[13957.6186603, -9221.67382856, 14133.9868696])
self.system.part.add(id=591, type=0, pos=[-0.0244431, 20.2215, -3.21805], f=[-302537.968566, -73161.1707768, 153682.278988])
self.system.part.add(id=592, type=0, pos=[1.92302, 8.19984, 5.20481], f=[-117.719104508, -636.204024245, -210.396688318])
self.system.part.add(id=593, type=1, pos=[0.784799, 21.4609, 6.57932], f=[93877.7998564, 1447.51721784, 69236.4359148])
self.system.part.add(id=594, type=0, pos=[1.30943, 18.6782, -12.4001], f=[557104.064168, -104034.928757, 164382.342909])
self.system.part.add(id=595, type=0, pos=[-3.77016, 21.9838, 28.5867], f=[-16164.4993007, -112553.926353, 102211.299275])
self.system.part.add(id=596, type=0, pos=[13.7001, 11.3459, 14.772], f=[-224534.129172, 9028.76864647, 406747.799368])
self.system.part.add(id=597, type=0, pos=[-0.547051, 16.6198, 21.4523], f=[188050.02466, 231110.636436, 216128.798878])
self.system.part.add(id=598, type=1, pos=[-15.1058, 14.4433, 11.3813], f=[-85498.6154746, -125783.482557, -431629.751872])
self.system.part.add(id=599, type=0, pos=[15.3668, 2.46229, -3.21754], f=[-80857.95333, -24766.2501019, -53034.2227219])
# read Blockfile
def compare(self):
    # recompute observables without propagating the system
    self.system.integrator.run(0)
    energies = Analysis(self.system).energy()
    pressures = Analysis(self.system).pressure()
    # sum the non-bonded contributions over all type pairs (0,0), (0,1), (1,1)
    energy = sum(energies['non_bonded', i, j] for i, j in ((0, 0), (0, 1), (1, 1)))
    pressure = sum(pressures['non_bonded', i, j] for i, j in ((0, 0), (0, 1), (1, 1)))
    totenergy = energies["total"]
    totpressure = pressures["total"]
    # the pairwise sums must reproduce the totals to within self.epsilon
    self.assertTrue(np.abs(energy - totenergy) / totenergy < self.epsilon,
                    "Failed. Energy difference too large")
    self.assertTrue(np.abs(pressure - totpressure) / totpressure < self.epsilon,
                    "Failed. Pressure difference too large")
if "TABULATED" in espressomd.features():
def test_tab(self):
self.compare()
else:
print("TABULATED feature inactive")
sys.exit()
if __name__ == "__main__" :
ut.main()
|
achauvinhameau/netProbe
|
refs/heads/master
|
py-net-probe/config/config.py
|
1
|
# -*- Mode: Python; python-indent-offset: 4 -*-
#
# Time-stamp: <2017-05-14 18:09:06 alex>
#
#
# --------------------------------------------------------------------
# PiProbe
# Copyright (C) 2016-2017 Alexandre Chauvin Hameau <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------
"""
config class
"""
import logging
import ConfigParser
# import pprint
class config(object):
""" class to manipulate the configuration """
# ----------------------------------------------------------
def __init__(self):
"""constructor
"""
self.conf = ConfigParser.RawConfigParser()
self.conf.read("init.cfg")
self.scheduler = {  # default period of each job, in seconds
'get_conf': 3600,
'push_results': 15,
'ping_server': 60,
'check_probes': 30,
'stats_probes': 60,
'stats_push': 300,
'upgrade': 3600*6
}
# every key must be present in the [scheduler] section of init.cfg;
# a missing key or an unreadable file is treated as fatal
for k in self.scheduler:
    try:
        self.scheduler[k] = self.conf.getint("scheduler", k)
    except Exception as ex:
        logging.error("exception {}".format(ex))
        assert False, "key {} not found in the config file".format(k)
return
# ----------------------------------------------------------
def get(self, _, key):
    """return the value stored for key

    The first argument (the section name) is ignored: only the
    scheduler settings are kept.
    """
    return self.scheduler[key]
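# Minimal usage sketch (illustration only, not part of the original module):
# assumes an init.cfg in the working directory whose [scheduler] section
# defines every key listed above, e.g.
#
#   [scheduler]
#   push_results = 15
#
# otherwise the constructor asserts.
if __name__ == "__main__":
    cfg = config()
    # the first argument (the section name) is ignored by get()
    print(cfg.get("scheduler", "push_results"))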
|
listamilton/supermilton.repository
|
refs/heads/supermilton.repository
|
script.areswizard/requests/packages/chardet/jpcntx.py
|
1776
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
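# Illustration (not part of the original chardet source): how a caller can
# look up the frequency category of a 2-char hiragana sequence in the
# jp2CharContext table defined below. The guard is needed because an order
# of DONT_KNOW (-1) would otherwise index the last row/column of the table;
# jp2CharContext is resolved at call time, so this helper may precede it.
def _example_category(prev_order, cur_order):
    if prev_order == DONT_KNOW or cur_order == DONT_KNOW:
        return DONT_KNOW
    return jp2CharContext[prev_order][cur_order]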
# This is the hiragana 2-char sequence table; the number in each cell gives
# the frequency category of that sequence.
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
self._mTotalRel = 0 # total sequence received
        # category counters, each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer did not complete a character, we record how many bytes are
        # needed to complete it and skip those bytes here. We could record
        # those bytes and analyse the character once it is complete, but one
        # character will not make much difference, so simply skipping it
        # simplifies our logic and improves performance.
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
def get_order(self, aBuf):
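        # Base-class stub: subclasses (SJIS/EUC-JP) override this to return
        # (hiragana order, char byte length); an order of -1 means "not hiragana".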
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def __init__(self):
self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
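# Illustrative sketch (not part of upstream chardet): feeding EUC-JP bytes to
# the analyser. The byte string below is a hypothetical hiragana pair (0xA4
# lead bytes), chosen so that relevance samples accumulate.
#
#   analyser = EUCJPContextAnalysis()
#   data = b'\xa4\xa2\xa4\xa4'
#   analyser.feed(data, len(data))
#   analyser.get_confidence()   # DONT_KNOW (-1) until enough data is seen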
# flake8: noqa
|
Microsoft/PTVS
|
refs/heads/master
|
Python/Tests/TestData/Grammar/AllErrors.py
|
7
|
from __future__ import *
from __future__ import braces
from __future__ import unknown
def f(a, x=42, c):
pass
f(a, x=42, c)
f(a, *b, *c)
f(a, **b, **c)
f(a, **abc, x = 42)
@fob
pass
def f(a, (42, c), d):
pass
def f(a, 42, abc):
pass
break
continue
print >> blah,
while True:
try:
pass
finally:
continue
del
del i+1
del +1
del (a or b)
del (a and b)
del {}
del [2,]
del (2,)
[2,] = 'abc'
(2,) = 'abc'
return
def f():
yield 42
return 42
yield 42
def f():
return 42
return 100
yield 42
#x = 42 = y
x, *y, *z = [2,3]
42 += 100
from import abc
def f():
from x import *
from __future__ import division
nonlocal blazzz
raise fob, oar
raise fob from oar
@fob
class X:
pass
def f(a: 42):
pass
def f(a = 42, b):
pass
def f(*abc, d = 42):
pass
def f(*abc, *b):
pass
def f(*abc, *b):
pass
def f(x, *, ):
pass
def f(x, (a, b), y):
pass
def f(x, (42, b), y):
pass
def f(abc, abc):
pass
def f(x, (abc, abc), y):
pass
def f(42):
pass
try:
pass
except:
pass
except Exception, e:
pass
try:
pass
except Exception as e:
pass
try:
pass
except Exception, e:
pass
b'abc' 'abc'
'abc' b'abc'
'abc' 42
b'abc' 42
abc.1
f(42=abc)
def f(42=abc):
pass
x = { 2:3, 3}
x = { 2, 2:3}
|
friendofrobots/air-toolkit
|
refs/heads/master
|
air/air_explorer/views.py
|
1
|
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from django.db.models import Count, Q
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.shortcuts import get_object_or_404, redirect
import json
from toolkit.models import *
from toolkit.forms import CategoryCreateForm
from toolkit import tasks
from celery.result import AsyncResult
def home(request, template_name="air_explorer/home.html"):
if request.user.is_authenticated():
try:
if request.user.profile.stage < 3:
return redirect('explore:download')
ready = True
except:
return redirect('explore:download')
else:
ready = False
return render_to_response(template_name, {
'path' : request.path,
'ready' : ready,
}, context_instance=RequestContext(request))
def download(request, template_name="air_explorer/download.html"):
if request.user.is_authenticated():
try:
stage = request.user.profile.stage
except:
stage = None
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
"stage" : stage,
}, context_instance=RequestContext(request))
def friends(request, page_num=1, template_name="air_explorer/friends.html"):
if request.user.is_authenticated():
profile = request.user.profile
try:
if profile.stage < 3:
return redirect('explore:download')
except:
return redirect('explore:download')
friends = Person.objects.filter(owner=profile).order_by('name')
paginator = Paginator(friends,96)
try:
friend_page = paginator.page(page_num)
except (EmptyPage, InvalidPage):
friend_page = paginator.page(paginator.num_pages)
return render_to_response(template_name, {
'path' : request.path,
'paginate' : friend_page,
}, context_instance=RequestContext(request))
else:
return redirect('explore:home')
def friend(request, person_id, page_num=1, template_name="air_explorer/friend.html"):
if request.user.is_authenticated():
profile = request.user.profile
person = get_object_or_404(Person,id=person_id)
memberships = person.categoryMembership.order_by('-value')[:12]
try:
active = profile.activeCategory
createForm = CategoryCreateForm(initial={'category_id' : active.id,
'startvalue' : active.startvalue,
'threshold' : active.threshold,
'decayrate' : active.decayrate,})
allowNew = False
except Category.DoesNotExist:
active = None
createForm = None
allowNew = True
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
'friend' : person,
'memberships' : memberships,
'active' : active,
'createForm' : createForm,
'allowNew' : allowNew,
}, context_instance=RequestContext(request))
def likes(request, startsWith=None, page_num=1, template_name="air_explorer/likes.html"):
if request.user.is_authenticated():
profile = request.user.profile
try:
if profile.stage < 3:
return redirect('explore:download')
except:
return redirect('explore:download')
        if startsWith is None:
startsWith = 'a'
pages = profile.getActivePages()
if startsWith == u'~':
likes = pages.filter(name__iregex=r'\A\W').order_by('name','fbid')
else:
likes = pages.filter(name__istartswith=startsWith).order_by('name','fbid')
paginator = Paginator(likes,24)
try:
like_page = paginator.page(page_num)
except (EmptyPage, InvalidPage):
like_page = paginator.page(paginator.num_pages)
try:
active = profile.activeCategory
createForm = CategoryCreateForm(initial={'category_id' : active.id,
'startvalue' : active.startvalue,
'threshold' : active.threshold,
'decayrate' : active.decayrate,})
allowNew = False
except Category.DoesNotExist:
active = None
createForm = None
allowNew = True
return render_to_response(template_name, {
'path' : request.path,
'startsWith': startsWith,
'paginate' : like_page,
'active' : active,
'createForm' : createForm,
'allowNew' : allowNew,
}, context_instance=RequestContext(request))
else:
return redirect('explore:home')
def like(request, page_id, page_num=1, template_name="air_explorer/like.html"):
if request.user.is_authenticated():
profile = request.user.profile
page = get_object_or_404(Page,id=page_id)
paginator = Paginator(page.pmisFrom.order_by('-value','toPage__fbid'),24)
try:
pmi_page = paginator.page(page_num)
except (EmptyPage, InvalidPage):
pmi_page = paginator.page(paginator.num_pages)
try:
active = profile.activeCategory
createForm = CategoryCreateForm(initial={'category_id' : active.id,
'startvalue' : active.startvalue,
'threshold' : active.threshold,
'decayrate' : active.decayrate,})
allowNew = False
except Category.DoesNotExist:
active = None
createForm = None
allowNew = True
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
'like' : page,
'paginate' : pmi_page,
'active' : active,
'createForm' : createForm,
'allowNew' : allowNew,
}, context_instance=RequestContext(request))
def categories(request, template_name="air_explorer/categories.html"):
if request.user.is_authenticated():
profile = request.user.profile
categories = Category.objects.filter(owner=profile).order_by('-last_updated')
try:
active = profile.activeCategory
createForm = CategoryCreateForm(initial={'category_id' : active.id,
'startvalue' : active.startvalue,
'threshold' : active.threshold,
'decayrate' : active.decayrate,})
allowNew = False
except Category.DoesNotExist:
active = None
createForm = None
allowNew = True
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
'categories' : categories,
'active' : active,
'createForm' : createForm,
'allowNew' : allowNew,
}, context_instance=RequestContext(request))
def category(request, category_id, page_num=1, template_name="air_explorer/category.html"):
if request.user.is_authenticated():
profile = request.user.profile
category = get_object_or_404(Category,id=category_id)
scores = category.scores.filter(value__gt=0).order_by('-value','page__fbid')
paginator = Paginator(scores,24)
try:
score_page = paginator.page(page_num)
except (EmptyPage, InvalidPage):
score_page = paginator.page(paginator.num_pages)
memberships = category.memberships.order_by('-value')[:12]
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
'category' : category,
'paginate' : score_page,
'memberships' : memberships,
}, context_instance=RequestContext(request))
def category_raw(request, category_id, template_name="air_explorer/category_raw.html"):
if request.user.is_authenticated():
profile = request.user.profile
category = get_object_or_404(Category,id=category_id)
scores = category.scores.filter(page__likedBy__in=category.group.people.all()).distinct()
act = scores.annotate(activity=Count('page__likedBy')).order_by('-activity','page__fbid')
mult = 1./(1.*max(act,key=lambda x : x.activity).activity)
paginator = Paginator(act,24)
score_page = paginator.page(1)
else:
return redirect('explore:home')
return render_to_response(template_name, {
'path' : request.path,
'category' : category,
'score_page' : score_page,
'mult' : mult,
}, context_instance=RequestContext(request))
|
clayz/crazy-quiz-web
|
refs/heads/master
|
lib/requests/requests/packages/urllib3/fields.py
|
1007
|
import email.utils
import mimetypes
from .packages import six
def guess_content_type(filename, default='application/octet-stream'):
"""
Guess the "Content-Type" of a file.
:param filename:
The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
"""
if filename:
return mimetypes.guess_type(filename)[0] or default
return default
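# For example (values come from the stdlib mimetypes table, so they may vary
# by platform): guess_content_type('photo.jpg') -> 'image/jpeg', while
# guess_content_type('unknown.xyz') falls back to 'application/octet-stream'.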
def format_header_param(name, value):
"""
Helper function to format and quote a single header parameter.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows RFC 2231, as
suggested by RFC 2388 Section 4.4.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
"""
if not any(ch in value for ch in '"\\\r\n'):
result = '%s="%s"' % (name, value)
try:
result.encode('ascii')
except UnicodeEncodeError:
pass
else:
return result
if not six.PY3: # Python 2:
value = value.encode('utf-8')
value = email.utils.encode_rfc2231(value, 'utf-8')
value = '%s*=%s' % (name, value)
return value
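# Illustrative behaviour (a sketch, not a doctest from upstream):
#
#   format_header_param('filename', 'report.txt')
#       -> 'filename="report.txt"'
#   format_header_param('filename', u'r\u00e9sum\u00e9.txt')
#       -> "filename*=utf-8''r%C3%A9sum%C3%A9.txt"   (RFC 2231 encoding)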
class RequestField(object):
"""
A data container for request body parameters.
:param name:
The name of this request field.
:param data:
The data/value body.
:param filename:
An optional filename of the request field.
:param headers:
An optional dict-like object of headers to initially use for the field.
"""
def __init__(self, name, data, filename=None, headers=None):
self._name = name
self._filename = filename
self.data = data
self.headers = {}
if headers:
self.headers = dict(headers)
@classmethod
def from_tuples(cls, fieldname, value):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
Supports constructing :class:`~urllib3.fields.RequestField` from
    a parameter of key/value strings AND key/filetuple. A filetuple is a
(filename, data, MIME type) tuple where the MIME type is optional.
For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
Field names and filenames must be unicode.
"""
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
content_type = guess_content_type(filename)
else:
filename = None
content_type = None
data = value
request_param = cls(fieldname, data, filename=filename)
request_param.make_multipart(content_type=content_type)
return request_param
def _render_part(self, name, value):
"""
Overridable helper function to format a single header parameter.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
"""
return format_header_param(name, value)
def _render_parts(self, header_parts):
"""
Helper function to format and quote a single header.
Useful for single headers that are composed of multiple items. E.g.,
'Content-Disposition' fields.
:param header_parts:
        A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
"""
parts = []
iterable = header_parts
if isinstance(header_parts, dict):
iterable = header_parts.items()
for name, value in iterable:
if value:
parts.append(self._render_part(name, value))
return '; '.join(parts)
def render_headers(self):
"""
Renders the headers for this request field.
"""
lines = []
sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
for sort_key in sort_keys:
if self.headers.get(sort_key, False):
lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
for header_name, header_value in self.headers.items():
if header_name not in sort_keys:
if header_value:
lines.append('%s: %s' % (header_name, header_value))
lines.append('\r\n')
return '\r\n'.join(lines)
def make_multipart(self, content_disposition=None, content_type=None,
content_location=None):
"""
Makes this request field into a multipart request field.
    This method sets the "Content-Disposition", "Content-Type" and
    "Content-Location" headers on the request parameter, overriding any
    values already present.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
"""
self.headers['Content-Disposition'] = content_disposition or 'form-data'
self.headers['Content-Disposition'] += '; '.join([
'', self._render_parts(
(('name', self._name), ('filename', self._filename))
)
])
self.headers['Content-Type'] = content_type
self.headers['Content-Location'] = content_location
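# ---------------------------------------------------------------------------
# Illustrative usage (a sketch; the field name and file contents are
# hypothetical):
#
#   field = RequestField.from_tuples('file', ('report.txt', 'hello world'))
#   field.render_headers()
#   # -> 'Content-Disposition: form-data; name="file"; filename="report.txt"\r\n'
#   #    'Content-Type: text/plain\r\n\r\n'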
|
opencloudinfra/orchestrator
|
refs/heads/master
|
venv/Lib/encodings/unicode_escape.py
|
852
|
""" Python 'unicode-escape' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_escape_encode
decode = codecs.unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_escape_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_escape_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-escape',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
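# Illustrative usage (not part of the module): the codec is normally reached
# through the codecs registry rather than imported directly.
#
#   import codecs
#   codecs.encode('caf\xe9\n', 'unicode-escape')    # -> b'caf\\xe9\\n'
#   codecs.decode(b'caf\\xe9\\n', 'unicode-escape') # -> 'caf\xe9\n'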
|
AOKP/external_chromium_org
|
refs/heads/kitkat
|
tools/win/split_link/graph_dependencies.py
|
145
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def main():
if len(sys.argv) != 2:
print 'usage: %s <output.html>' % sys.argv[0]
return 1
env = os.environ.copy()
env['GYP_GENERATORS'] = 'dump_dependency_json'
print 'Dumping dependencies...'
popen = subprocess.Popen(
['python', 'build/gyp_chromium'],
shell=True, env=env)
popen.communicate()
if popen.returncode != 0:
return popen.returncode
print 'Finding problems...'
popen = subprocess.Popen(
['python', 'tools/gyp-explain.py', '--dot',
'chrome.gyp:browser#', 'core.gyp:webcore#'],
stdout=subprocess.PIPE,
shell=True)
out, _ = popen.communicate()
if popen.returncode != 0:
return popen.returncode
# Break into pairs to uniq to make graph less of a mess.
print 'Simplifying...'
deduplicated = set()
lines = out.splitlines()[2:-1]
for line in lines:
line = line.strip('\r\n ;')
pairs = line.split(' -> ')
for i in range(len(pairs) - 1):
deduplicated.add('%s -> %s;' % (pairs[i], pairs[i + 1]))
graph = 'strict digraph {\n' + '\n'.join(sorted(deduplicated)) + '\n}'
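  # e.g. one path 'a -> b -> c;' contributes the edges 'a -> b;' and 'b -> c;',
  # so prefixes shared by many paths collapse into single graph edges.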
print 'Writing report to %s...' % sys.argv[1]
path_count = len(out.splitlines())
with open(os.path.join(BASE_DIR, 'viz.js', 'viz.js')) as f:
viz_js = f.read()
with open(sys.argv[1], 'w') as f:
f.write(PREFIX % path_count)
f.write(graph)
f.write(SUFFIX % viz_js)
print 'Done.'
PREFIX = r'''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Undesirable Dependencies</title>
</head>
<body>
<h1>Undesirable Dependencies</h1>
<h2>browser → webcore</h2>
<h3>%d paths</h3>
<script type="text/vnd.graphviz" id="graph">
'''
SUFFIX = r'''
</script>
<script>%s</script>
<div id="output">Rendering...</div>
<script>
setTimeout(function() {
document.getElementById("output").innerHTML =
Viz(document.getElementById("graph").innerHTML, "svg");
}, 1);
</script>
</body>
</html>
'''
if __name__ == '__main__':
sys.exit(main())
|
sassoftware/rbuild
|
refs/heads/master
|
rbuild_test/unit_test/productstoretest/dirstoretest.py
|
1
|
#!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from rbuild_test import rbuildhelp
from testutils import mock
from conary.lib import util
from conary import state
from rbuild import errors
from rpath_proddef import api1 as proddef
from rbuild.productstore import dirstore
from rbuild.facade import conaryfacade
from rbuild_test.unit_test.facadetest import conaryfacadetest
class DirStoreTest(rbuildhelp.RbuildHelper):
def _prepProductStore(self):
os.chdir(self.workDir)
pd = self._newProduct()
util.mkdirChain('foo/.rbuild/product-definition')
pd.serialize(file(
'foo/.rbuild/product-definition/product-definition.xml', 'w'))
util.mkdirChain('foo/stable')
self.writeFile('foo/stable/.stage', 'stable\n')
from rbuild.productstore import abstract
mock.mock(abstract.ProductStore, 'checkStageIsValid')
return pd
def _newProduct(self, name='Foo', shortName='foo', description='More foo',
version='1.0', versionDescription='Super version 1.0',
conaryRepositoryHostname='cny.tv', conaryNamespace='ns'):
pd = proddef.ProductDefinition()
pd.setProductName(name)
pd.setProductShortname(shortName)
pd.setProductDescription(description)
pd.setProductVersion(version)
pd.setProductVersionDescription(versionDescription)
pd.setConaryRepositoryHostname(conaryRepositoryHostname)
pd.setConaryNamespace(conaryNamespace)
pd.setImageGroup('group-os')
return pd
def testCheckoutProductStore(self):
self._prepProductStore()
util.mkdirChain('foo/stable/package')
os.chdir('foo/stable/package')
handle = self.getRbuildHandle(productStore=mock.MockObject())
productStore = dirstore.CheckoutProductStore(handle)
self.assertEquals(productStore.getBaseDirectory(),
self.workDir + '/foo')
self.assertEquals(productStore.getActiveStageName(), 'stable')
productStore = dirstore.CheckoutProductStore(handle,
baseDirectory=self.workDir + '/foo')
self.assertEquals(productStore.getBaseDirectory(),
self.workDir + '/foo')
def testGetDefaultProductDirectory(self):
self._prepProductStore()
productDirectory = dirstore.getDefaultProductDirectory('foo/stable')
# relative path
self.assertEquals(productDirectory, 'foo')
os.chdir('foo/stable')
# starts with os.getcwd() so will be absolute path
productDirectory = dirstore.getDefaultProductDirectory()
self.assertEquals(productDirectory, self.workDir + '/foo')
self.assertRaises(errors.MissingProductStoreError,
dirstore.getDefaultProductDirectory, 'directoryDoesNotExist')
os.chdir('/')
self.assertRaises(errors.MissingProductStoreError,
dirstore.getDefaultProductDirectory, error=True)
def testGetStageNameFromDirectory(self):
self._prepProductStore()
stageName = dirstore.getStageNameFromDirectory('foo/stable')
assert stageName == 'stable'
os.chdir('foo/stable')
stageName = dirstore.getStageNameFromDirectory('.')
assert stageName == 'stable'
stageName = dirstore.getStageNameFromDirectory()
assert stageName == 'stable'
def testProductStoreError(self):
handle = self.getRbuildHandle()
err = self.assertRaises(errors.RbuildError,
dirstore.CheckoutProductStore, handle, self.workDir)
assert(str(err) == "No product directory at '%s'" % self.workDir)
err = self.assertRaises(errors.RbuildError,
dirstore.CheckoutProductStore, handle)
assert(str(err) == "Could not find product directory")
def testCore(self):
handle = self.getRbuildHandle()
productClass = mock.MockObject(stableReturnValues=True)
stage = mock.MockObject(label='localhost@rpl:1')
productClass().getStage._mock.setReturn(stage, 'foo')
productClass._mock.popCall()
self.mock(proddef, 'ProductDefinition', productClass)
os.chdir(self.workDir)
util.mkdirChain('foo/.rbuild/product-definition')
self.writeFile('foo/.rbuild/product-definition/product-definition.xml',
'')
p = dirstore.CheckoutProductStore(handle, 'foo')
err = self.assertRaises(errors.RbuildError, p.getActiveStageName)
self.assertEquals(str(err), 'No current stage (setActiveStageName)')
mock.mock(dirstore.CheckoutProductStore, 'checkStageIsValid')
p.setActiveStageName('foo')
assert(p.getActiveStageName() == 'foo')
mock.mockMethod(p._getSourceTroveVersion,
returnValue='cny.tv@ns:1/2-3')
proddefObj = p.getProduct()
_, kw = productClass._mock.popCall()
kw = dict(kw)
kw.pop('fromStream')
configPath = self.workDir + '/foo/.rbuild/product-definition/rmakerc'
self.assertEquals(p.getRmakeConfigPath(), configPath)
mock.mockMethod(handle.facade.conary.updateCheckout)
p.update()
rbuildDir = p.getProductDefinitionDirectory()
platformDir = p.getPlatformDefinitionDirectory()
assert(platformDir == self.workDir + '/foo/.rbuild/platform-definition')
handle.facade.conary.updateCheckout._mock.assertCalled(rbuildDir)
proddefObj.getStages._mock.setDefaultReturn(
[mock.MockObject(name='a'),
mock.MockObject(name='b'),
mock.MockObject(name='c')])
stageNames = [x for x in p.iterStageNames()]
self.assertEquals(stageNames, ['a', 'b', 'c'])
def testUpdateError(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('update')
productStore._handle.facade.conary.updateCheckout._mock.setDefaultReturn(False)
err = self.assertRaises(errors.RbuildError, productStore.update)
assert(str(err) == "Failed to update product definition")
def testProductDirectoryError(self):
productStoreClass = mock.mockClass(dirstore.CheckoutProductStore)
productStore = productStoreClass()
productStore._mock.enable('_testProductDirectory')
err = self.assertRaises(errors.RbuildError,
productStore._testProductDirectory,
self.workDir)
assert(str(err) == "No product directory at %r" %self.workDir)
def testGetEditedRecipeDicts(self):
realListDir = os.listdir
realExists = os.path.exists
def mockListDir(path):
if path.endswith('/qa'):
return ['asdf' ]
return realListDir(path)
def mockExists(path):
if path.endswith('CONARY'):
return True
if path.startswith('/PROD'):
return True
return realExists(path)
self.mock(os, 'listdir', lambda *args: mockListDir(*args))
self.mock(os.path, 'exists', lambda *args: mockExists(*args))
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getEditedRecipeDicts')
productStore.getRbuildConfigPath._mock.setReturn(
self.workDir + '/rbuildrc')
handle = self.getRbuildHandle(productStore=productStore)
productStore._handle.facade.conary = mock.MockObject()
stateObj = mock.MockObject()
stateObj.getSourceState().getName._mock.setDefaultReturn('asdf:source')
mock.mock(state, 'ConaryStateFromFile')
state.ConaryStateFromFile._mock.setDefaultReturn(stateObj)
productStore._handle.facade.conary.getNameForCheckout._mock.setDefaultReturn('asdf')
productStore._handle.facade.conary.isGroupName._mock.setDefaultReturn(False)
productStore.getActiveStageName._mock.setDefaultReturn(None)
productStore.getStageDirectory._mock.setDefaultReturn('/PROD/qa')
packageDict, groupDict = productStore.getEditedRecipeDicts('qa')
assert packageDict == {'asdf' : '/PROD/qa/asdf/asdf.recipe'}
assert groupDict == {}
productStore.getActiveStageName._mock.setDefaultReturn('qa')
packageDict, groupDict = productStore.getEditedRecipeDicts()
assert packageDict == {'asdf' : '/PROD/qa/asdf/asdf.recipe'}
assert groupDict == {}
productStore._handle.facade.conary.getNameForCheckout._mock.setDefaultReturn('group-asdf')
productStore._handle.facade.conary.isGroupName._mock.setDefaultReturn(True)
stateObj.getSourceState().getName._mock.setDefaultReturn(
'group-asdf:source')
packageDict, groupDict = productStore.getEditedRecipeDicts('qa')
assert packageDict == {}
assert groupDict == {'group-asdf' : '/PROD/qa/asdf/group-asdf.recipe'}
def testStatusStore(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.set(statusStore=None)
productStore._mock.enableMethod('setStatus')
productStore._mock.enableMethod('getStatus')
productStore._mock.enableMethod('_getStatusStore')
productStore._mock.enableMethod('getPackageJobId')
productStore._mock.enableMethod('getGroupJobId')
productStore._mock.enableMethod('getImageJobIds')
productStore._mock.enableMethod('setPackageJobId')
productStore._mock.enableMethod('setGroupJobId')
productStore._mock.enableMethod('setImageJobIds')
productStore.iterStageNames._mock.setDefaultReturn(['teststage'])
productStore.getActiveStageName._mock.setDefaultReturn('teststage')
productStore._mock.enable('_baseDirectory')
productStore._baseDirectory = self.workDir
assert(productStore.getGroupJobId() is None)
assert(productStore.getImageJobIds() == [])
assert(productStore.getPackageJobId() is None)
productStore.setGroupJobId(10)
        assert(productStore.getGroupJobId() == 10)
productStore.setImageJobIds(15)
assert(productStore.getImageJobIds() == [15])
productStore.setPackageJobId(20)
        assert(productStore.getGroupJobId() == 10)
        assert(productStore.getImageJobIds() == [15])
        assert(productStore.getPackageJobId() == 20)
# key 'foo' is not defined
self.assertRaises(KeyError, productStore.setStatus, 'foo', 'asdf')
def testCheckoutPlatform(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._handle.product.getProductDefinitionLabel._mock.setDefaultReturn('localhost@rpl:2')
productStore.getPlatformDefinitionDirectory._mock.setDefaultReturn(self.workDir)
productStore._mock.enableMethod('checkoutPlatform')
productStore.checkoutPlatform()
productStore._handle.facade.conary.checkout._mock.assertCalled(
'platform-definition',
'localhost@rpl:2',
targetDir=self.workDir)
def testGetPlatformAutoLoadRecipes(self):
pd = proddef.ProductDefinition()
handle = conaryfacadetest.MockHandle()
handle.product = pd
handle.facade = mock.MockObject()
handle.getConfig().user = ('JeanValjean', 'password')
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getPlatformAutoLoadRecipes')
productStore._mock.set(_handle=handle)
facade = conaryfacade.ConaryFacade(handle)
repos = conaryfacadetest.MockRepositoryClient()
self.mock(facade, '_getRepositoryClient', lambda: repos)
handle.facade._mock.set(conary=facade)
alr = productStore.getPlatformAutoLoadRecipes()
self.assertEquals(alr, [])
alRecipes = ['foo', 'bar']
for troveName in alRecipes:
pd.addPlatformAutoLoadRecipe(troveName)
pkg1 = self.makeTroveTuple('foo=/foo.rpath.com@foo:2/2-2-2')
groupTup1 = self.makeTroveTuple('group-foo=/foo.rpath.com@foo:1/1-1-1')
groupTrv1 = mock.MockObject(stableReturnValues=True)
groupTrv1.iterTroveList._mock.setDefaultReturn([pkg1])
self.mock(repos, 'getTroves', lambda *args, **kwargs: [ groupTrv1 ])
pd.addSearchPath(groupTup1[0], label=str(groupTup1[1].branch()),
version=str(groupTup1[1].trailingRevision()))
alr = productStore.getPlatformAutoLoadRecipes()
self.assertEquals(alr, ['foo=/foo.rpath.com@foo:2/2-2-2'])
def testGetStageDirectory(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getStageDirectory')
mock.mock(os.path, 'exists')
os.path.exists._mock.setReturn(False, self.workDir + '/foo')
productStore._mock.set(_baseDirectory=self.workDir)
err = self.assertRaises(errors.RbuildError,
productStore.getStageDirectory, 'foo')
assert(str(err) == "Stage directory for 'foo' does not exist")
os.path.exists._mock.setReturn(True, self.workDir + '/foo')
workDir = productStore.getStageDirectory('foo')
assert(workDir == self.workDir + '/foo')
productStore.getActiveStageName._mock.setReturn(None)
workDir = productStore.getStageDirectory()
assert(workDir is None)
productStore.getActiveStageName._mock.assertCalled()
def testGetCheckoutDirectory(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getCheckoutDirectory')
productStore.getStageDirectory._mock.setDefaultReturn('/PROD////qa')
self.assertEquals(productStore.getCheckoutDirectory('foo'),
'/PROD/qa/foo')
def testGetPackagePath(self):
realListDir = os.listdir
realExists = os.path.exists
def mockListDir(path):
if path.endswith('/qa'):
return ['asdf' ]
return realListDir(path)
def mockExists(path):
if path.endswith('CONARY'):
return True
if path.startswith('/PROD'):
return True
return realExists(path)
self.mock(os, 'listdir', lambda *args: mockListDir(*args))
self.mock(os.path, 'exists', lambda *args: mockExists(*args))
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getPackagePath')
productStore.getStageDirectory._mock.setDefaultReturn('/PROD/qa')
handle = self.getRbuildHandle(productStore=productStore)
productStore._handle.facade.conary = mock.MockObject()
stateObj = mock.MockObject()
stateObj.getSourceState().getName._mock.setDefaultReturn('asdf:source')
mock.mock(state, 'ConaryStateFromFile')
state.ConaryStateFromFile._mock.setDefaultReturn(stateObj)
productStore._handle.facade.conary.getNameForCheckout._mock.setDefaultReturn('asdf')
productStore._handle.facade.conary.isGroupName._mock.setDefaultReturn(False)
packagePath = productStore.getPackagePath('asdf')
assert(packagePath == '/PROD/qa/asdf')
packagePath = productStore.getPackagePath('blah')
assert(packagePath is None)
def testGetConfigData(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enable('_baseDirectory')
productStore._baseDirectory = self.workDir
productStore._mock.enableMethod('getRbuildConfigData')
productStore._mock.enableMethod('getRbuildConfigPath')
productStore._mock.enableMethod('getRmakeConfigData')
productStore._mock.enableMethod('getRmakeConfigPath')
productStore.getProductDefinitionDirectory._mock.setDefaultReturn(
self.workDir)
os.chdir(self.workDir)
util.mkdirChain('.rbuild')
self.writeFile('.rbuild/rbuildrc', 'rbuildrcContents\n')
self.writeFile('rmakerc', 'rmakercContents\n')
self.assertEquals(productStore.getRbuildConfigData(),
'rbuildrcContents\n')
self.assertEquals(productStore.getRmakeConfigData(),
'rmakercContents\n')
def testGetProductVersion(self):
productStore = mock.MockInstance(dirstore.CheckoutProductStore)
productStore._mock.enableMethod('getProductVersion')
product = mock.MockObject()
product.getProductVersion._mock.setDefaultReturn('42.42')
productStore.getProduct._mock.setDefaultReturn(product)
self.assertEquals(productStore.getProductVersion(), '42.42')
def testSaveProduct(self):
self._prepProductStore()
os.chdir('foo/stable')
handle = self.getRbuildHandle(productStore=mock.MockObject())
productStore = dirstore.CheckoutProductStore(handle)
mock.mockMethod(productStore._getSourceTroveVersion,
returnValue='cny.tv@ns:1/2-3')
prodDef = productStore.getProduct()
self.assertEqual(prodDef.getProductDescription(), 'More foo')
# Update the product definition, and make sure save will persist it
prodDef.setProductDescription("Even more foo")
productStore.save(prodDef)
prodDef = productStore.getProduct()
self.assertEqual(prodDef.getProductDescription(), 'Even more foo')
|
bpsinc-native/src_tools_gyp
|
refs/heads/master
|
test/win/gyptest-cl-pdbname.py
|
239
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure pdb is named as expected (shared between .cc files).
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['ninja'])
CHDIR = 'compiler-flags'
test.run_gyp('pdbname.gyp', chdir=CHDIR)
test.build('pdbname.gyp', test.ALL, chdir=CHDIR)
# Confirm that the default behaviour is to name the .pdb per-target (rather
# than per .cc file).
test.built_file_must_exist('obj/test_pdbname.cc.pdb', chdir=CHDIR)
# Confirm that there should be a .pdb alongside the executable.
test.built_file_must_exist('test_pdbname.exe', chdir=CHDIR)
test.built_file_must_exist('test_pdbname.exe.pdb', chdir=CHDIR)
test.pass_test()
|
brianlions/python-nebula
|
refs/heads/master
|
nebula/log.py
|
1
|
#!/usr/bin/env python3
#
# Copyright (c) 2012 Brian Yi ZHANG <brianlions at gmail dot com>
#
# This file is part of pynebula.
#
# pynebula is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pynebula is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pynebula. If not, see <http://www.gnu.org/licenses/>.
#
import time
import traceback
import os
import sys
class Logger(object):
'''
'''
EMERG, ALERT, CRIT, ERR, WARNING, NOTICE, INFO, DEBUG = range(0, 8)
LOG_LEVELS = frozenset((EMERG, ALERT, CRIT, ERR, WARNING, NOTICE, INFO, DEBUG))
__level_names = {
EMERG: ('eme', 'emerg'),
ALERT: ('ale', 'alert'),
CRIT: ('cri', 'crit'),
ERR: ('err', 'err'),
WARNING: ('war', 'warning'),
NOTICE: ('not', 'notice'),
INFO: ('inf', 'info'),
DEBUG: ('deb', 'debug'),
}
@classmethod
def log_mask(cls, level):
'''Returns log mask for the specified log level.
Args:
level: one of the constants in Logger.LOG_LEVELS.
Returns:
An integer which can be passed to set_log_mask() etc.
'''
if level not in cls.__level_names:
raise ValueError("invalid log level: {:d}".format(level))
return (1 << level)
@classmethod
def mask_upto(cls, level):
'''Returns log mask for all levels through level.
Args:
level: one of the constants in Logger.LOG_LEVELS.
Returns:
An integer which can be passed to set_log_mask() etc.
'''
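        # e.g. mask_upto(WARNING) == 0b11111 == 31, enabling EMERG..WARNING,
        # whereas log_mask(WARNING) == 0b10000 == 16 selects the single level.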
if level not in cls.__level_names:
raise ValueError("invalid log level: {:d}".format(level))
return (1 << (level + 1)) - 1
@classmethod
def level_name(cls, level, abbr = False):
'''Returns name of the specified log level.
Args:
level: one of the constants in Logger.LOG_LEVELS.
abbr: whether to use the abbreviated name or not.
Returns:
Human-readable string representation of the log level.'''
if level not in cls.__level_names:
raise ValueError("invalid log level: {:d}".format(level))
return cls.__level_names[level][(not abbr) and 1 or 0]
@classmethod
def timestamp_str(cls, now = None, use_gmtime = False, show_timezone = False):
'''Format and return current date and time.
Args:
now: seconds (as float) since the unix epoch, use current
time stamp if value is false.
use_gmtime: whether to use GMT time or not.
show_timezone: whether to display the time zone or not.
Returns:
String representation of date & time, the format of the returned
value is "YYYY.mm.dd-HH:MM:SS.ssssss-ZZZ".
'''
if not now:
now = time.time()
if show_timezone:
tz_format = use_gmtime and '-GMT' or '-%Z'
else:
tz_format = ''
return time.strftime('%Y.%m.%d-%H:%M:%S' + ('.%06d' % ((now - int(now)) * 1000000)) + tz_format,
use_gmtime and time.gmtime(now) or time.localtime(now))
def __init__(self, log_mask = None, use_gmtime = False, show_timezone = True):
self.__log_mask = log_mask and log_mask or self.mask_upto(self.INFO)
self.__use_gmtime = use_gmtime and True or False
self.__show_timezone = show_timezone and True or False
def set_log_mask(self, new_mask):
'''Set log mask, and return previous log mask.
Args:
new_mask: the new log mask to be set to.
Returns:
Previous log mask (as integer).
'''
if new_mask < self.mask_upto(self.EMERG) or new_mask > self.mask_upto(self.DEBUG):
raise ValueError("invalid log mask: {:d}".format(new_mask))
old_mask = self.__log_mask
self.__log_mask = new_mask
return old_mask
def set_max_level(self, max_level):
'''Log all messages through max_level.
Args:
max_level: one of the constants in Logger.LOG_LEVELS.
Returns:
Previous log mask (as integer).
'''
return self.set_log_mask(Logger.mask_upto(max_level))
def is_use_gmtime(self):
'''Whether we are using GMT time representation of not.
Returns:
True if using GMT, False otherwise.
'''
return self.__use_gmtime
def is_show_timezone(self):
'''Whether we are printing the time zone of not.
Returns:
True if printing time zone, False otherwise.
'''
return self.__show_timezone
def log(self, level, msg, use_gmtime = None, show_timezone = None,
stack_limit = 2):
'''Generate one log message.
Args:
level: level of the message
msg: string message to be logged
use_gmtime: whether to use GMT or not, if value is None, use the
value passed to __init__()
show_timezone: whether to log time zone or not, if value is None, use
the value passed to __init__()
stack_limit: passed to traceback.extract_stack(), in order to get
the correct file name, line number, and method name.
Returns:
True if the message was logged, False otherwise.
'''
if self.log_mask(level) & self.__log_mask:
file_name, line_num, func_name = traceback.extract_stack(limit = stack_limit)[0][:3]
# remove current working directory if it is prefix of the file name
cwd = os.getcwd() + os.path.sep
if file_name.startswith(cwd):
file_name = '.' + os.path.sep + file_name[len(cwd):]
if use_gmtime is None:
use_gmtime = self.is_use_gmtime()
if show_timezone is None:
show_timezone = self.is_show_timezone()
self.output_message(level, msg, file_name, line_num, func_name,
use_gmtime = use_gmtime,
show_timezone = show_timezone)
return True
else:
return False
def debug(self, msg, stack_limit = 3):
return self.log(self.DEBUG, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def info(self, msg, stack_limit = 3):
return self.log(self.INFO, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def notice(self, msg, stack_limit = 3):
return self.log(self.NOTICE, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def warning(self, msg, stack_limit = 3):
return self.log(self.WARNING, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def err(self, msg, stack_limit = 3):
return self.log(self.ERR, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def crit(self, msg, stack_limit = 3):
return self.log(self.CRIT, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def alert(self, msg, stack_limit = 3):
return self.log(self.ALERT, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def emerg(self, msg, stack_limit = 3):
return self.log(self.EMERG, msg, use_gmtime = self.__use_gmtime,
show_timezone = self.__show_timezone,
stack_limit = stack_limit)
def output_message(self, level, msg, file_name, line_num, func_name,
use_gmtime = None, show_timezone = None):
'''Method subclass MUST implement.
Args:
level: (int) level of the message
msg: (str) message to be logged
file_name: (str) in which file the message was generated
line_num: (int) at which line the message was generated
func_name: (str) in which method (or function) the message was
generated
use_gmtime: (bool) whether to use GMT or not
show_timezone: (bool) whether to log the time zone or not
Returns:
(not required)
'''
raise NotImplementedError("{:s}.{:s}: output_message() not implemented".format(self.__class__.__module__,
self.__class__.__name__))
#-------------------------------------------------------------------------------
class ConsoleLogger(Logger):
'''Logger which log messages to console (stdout).'''
def __init__(self, *args, **kwargs):
super(ConsoleLogger, self).__init__(*args, **kwargs)
def output_message(self, level, msg, file_name, line_num, func_name,
use_gmtime = None, show_timezone = None):
'''Implements the abstract method defined in parent class.'''
if use_gmtime is None:
use_gmtime = self.is_use_gmtime()
if show_timezone is None:
show_timezone = self.is_show_timezone()
# time, log level, file name, line number, method name, log message
print("[{:s} {:s} {:s}:{:d}:{:s}] {:s}".format(self.timestamp_str(use_gmtime, show_timezone),
self.level_name(level, abbr = True),
file_name, line_num, func_name, msg))
sys.stdout.flush()
#-------------------------------------------------------------------------------
class WrappedLogger(object):
def __init__(self, log_handle = None):
self.__log_handle = None
self.set_log_handle(log_handle)
def set_log_handle(self, log_handle):
'''Set new log handle to be used.
Args:
log_handle: new log handle to be used
Returns:
Previous log handle, value might be None.
'''
if (log_handle is not None) and (not isinstance(log_handle, Logger)):
raise TypeError("log_handle {:s} is not an instance of {:s}.Logger".format(repr(log_handle),
self.__class__.__module__))
prev_handle = self.__log_handle
self.__log_handle = log_handle
return prev_handle
def get_log_handle(self):
        '''Get the log handle currently in use.
Returns:
Current log handle in use, value might be None.
'''
return self.__log_handle
def log_debug(self, msg):
if self.__log_handle:
self.__log_handle.debug(msg, stack_limit = 4)
def log_info(self, msg):
if self.__log_handle:
self.__log_handle.info(msg, stack_limit = 4)
def log_notice(self, msg):
if self.__log_handle:
self.__log_handle.notice(msg, stack_limit = 4)
def log_warning(self, msg):
if self.__log_handle:
self.__log_handle.warning(msg, stack_limit = 4)
def log_err(self, msg):
if self.__log_handle:
self.__log_handle.err(msg, stack_limit = 4)
def log_crit(self, msg):
if self.__log_handle:
self.__log_handle.crit(msg, stack_limit = 4)
def log_alert(self, msg):
if self.__log_handle:
self.__log_handle.alert(msg, stack_limit = 4)
def log_emerg(self, msg):
if self.__log_handle:
self.__log_handle.emerg(msg, stack_limit = 4)
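# Minimal usage sketch (illustrative, using only the classes above): any
# object mixing in WrappedLogger can emit messages through an optional handle.
#
#   class Worker(WrappedLogger):
#       def run(self):
#           self.log_info("worker started")   # silently skipped if no handle
#
#   w = Worker(log_handle = ConsoleLogger())
#   w.run()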
#-------------------------------------------------------------------------------
def demo():
logger = ConsoleLogger(show_timezone = True)
for max_level in (Logger.DEBUG, Logger.INFO, Logger.NOTICE, Logger.WARNING, Logger.ERR):
print("max log level: %s" % Logger.level_name(max_level))
logger.set_log_mask(Logger.mask_upto(max_level))
for level in (Logger.DEBUG, Logger.INFO, Logger.NOTICE, Logger.WARNING, Logger.ERR):
logger.log(level, "message level %s" % Logger.level_name(level, abbr = False))
print()
print("max log level: %s" % Logger.level_name(Logger.DEBUG))
logger.set_log_mask(Logger.mask_upto(logger.DEBUG))
logger.debug("debug()")
logger.info("info()")
logger.notice("notice()")
logger.warning("wanring()")
logger.err("err()")
if __name__ == '__main__':
demo()
|
retooth/morse
|
refs/heads/master
|
morse/slots/search.py
|
1
|
#!/usr/bin/python
# This file is part of Morse.
#
# Morse is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morse is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Morse. If not, see <http://www.gnu.org/licenses/>.
from . import app
from flask import request, jsonify
from ..models.core import User
from ..protocols import ajax_triggered
@app.route('/search/users.json', methods=['GET'])
@ajax_triggered
def get_users ():
"""
Gets a list of users matching GET
parameter pattern.
:rtype: json
"""
pattern = request.args.get('pattern')
if pattern:
users = User.query.filter(User.username.ilike('%' + pattern + '%')).all()
else:
users = User.query.all()
userlist = []
for u in users:
userlist.append([u.id, u.username])
return jsonify(users = userlist)
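# Illustrative request/response (hypothetical data):
#   GET /search/users.json?pattern=al
# could return {"users": [[3, "alice"], [7, "salma"]]}, i.e. a JSON object
# whose "users" key holds [id, username] pairs for all matching users.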
|
anditto/bitcoin
|
refs/heads/master
|
test/functional/wallet_labels.py
|
5
|
#!/usr/bin/env python3
# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test label RPCs.
RPCs tested are:
- getaddressesbylabel
- listaddressgroupings
- setlabel
"""
from collections import defaultdict
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet_util import test_address
class WalletLabelsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Check that there's no UTXO on the node
node = self.nodes[0]
assert_equal(len(node.listunspent()), 0)
# Note each time we call generate, all generated coins go into
# the same address, so we call twice to get two addresses w/50 each
node.generatetoaddress(nblocks=1, address=node.getnewaddress(label='coinbase'))
node.generatetoaddress(nblocks=101, address=node.getnewaddress(label='coinbase'))
assert_equal(node.getbalance(), 100)
# there should be 2 address groups
# each with 1 address with a balance of 50 Bitcoins
address_groups = node.listaddressgroupings()
assert_equal(len(address_groups), 2)
# the addresses aren't linked now, but will be after we send to the
# common address
linked_addresses = set()
for address_group in address_groups:
assert_equal(len(address_group), 1)
assert_equal(len(address_group[0]), 3)
assert_equal(address_group[0][1], 50)
assert_equal(address_group[0][2], 'coinbase')
linked_addresses.add(address_group[0][0])
# send 50 from each address to a third address not in this wallet
common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr"
node.sendmany(
amounts={common_address: 100},
subtractfeefrom=[common_address],
minconf=1,
)
# there should be 1 address group, with the previously
# unlinked addresses now linked (they both have 0 balance)
address_groups = node.listaddressgroupings()
assert_equal(len(address_groups), 1)
assert_equal(len(address_groups[0]), 2)
assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses)
assert_equal([a[1] for a in address_groups[0]], [0, 0])
node.generate(1)
# we want to reset so that the "" label has what's expected.
# otherwise we're off by exactly the fee amount as that's mined
# and matures in the next 100 blocks
amount_to_send = 1.0
# Create labels and make sure subsequent label API calls
# recognize the label/address associations.
labels = [Label(name) for name in ("a", "b", "c", "d", "e")]
for label in labels:
address = node.getnewaddress(label.name)
label.add_receive_address(address)
label.verify(node)
# Check all labels are returned by listlabels.
assert_equal(node.listlabels(), sorted(['coinbase'] + [label.name for label in labels]))
# Send a transaction to each label.
for label in labels:
node.sendtoaddress(label.addresses[0], amount_to_send)
label.verify(node)
# Check the amounts received.
node.generate(1)
for label in labels:
assert_equal(
node.getreceivedbyaddress(label.addresses[0]), amount_to_send)
assert_equal(node.getreceivedbylabel(label.name), amount_to_send)
for i, label in enumerate(labels):
to_label = labels[(i + 1) % len(labels)]
node.sendtoaddress(to_label.addresses[0], amount_to_send)
node.generate(1)
for label in labels:
address = node.getnewaddress(label.name)
label.add_receive_address(address)
label.verify(node)
assert_equal(node.getreceivedbylabel(label.name), 2)
label.verify(node)
node.generate(101)
# Check that setlabel can assign a label to a new unused address.
for label in labels:
address = node.getnewaddress()
node.setlabel(address, label.name)
label.add_address(address)
label.verify(node)
assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "")
# Check that addmultisigaddress can assign labels.
if not self.options.descriptors:
for label in labels:
addresses = []
for _ in range(10):
addresses.append(node.getnewaddress())
multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
label.add_address(multisig_address)
label.purpose[multisig_address] = "send"
label.verify(node)
node.generate(101)
# Check that setlabel can change the label of an address from a
# different label.
change_label(node, labels[0].addresses[0], labels[0], labels[1])
# Check that setlabel can set the label of an address already
# in the label. This is a no-op.
change_label(node, labels[2].addresses[0], labels[2], labels[2])
self.log.info('Check watchonly labels')
node.createwallet(wallet_name='watch_only', disable_private_keys=True)
wallet_watch_only = node.get_wallet_rpc('watch_only')
BECH32_VALID = {
'✔️_VER15_PROG40': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqxkg7fn',
'✔️_VER16_PROG03': 'bcrt1sqqqqq8uhdgr',
'✔️_VER16_PROB02': 'bcrt1sqqqq4wstyw',
}
BECH32_INVALID = {
'❌_VER15_PROG41': 'bcrt1sqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqajlxj8',
'❌_VER16_PROB01': 'bcrt1sqq5r4036',
}
for l in BECH32_VALID:
ad = BECH32_VALID[l]
wallet_watch_only.importaddress(label=l, rescan=False, address=ad)
node.generatetoaddress(1, ad)
assert_equal(wallet_watch_only.getaddressesbylabel(label=l), {ad: {'purpose': 'receive'}})
assert_equal(wallet_watch_only.getreceivedbylabel(label=l), 0)
for l in BECH32_INVALID:
ad = BECH32_INVALID[l]
assert_raises_rpc_error(
-5,
"Address is not valid" if self.options.descriptors else "Invalid Bitcoin address or script",
lambda: wallet_watch_only.importaddress(label=l, rescan=False, address=ad),
)
class Label:
def __init__(self, name):
# Label name
self.name = name
# Current receiving address associated with this label.
self.receive_address = None
# List of all addresses assigned with this label
self.addresses = []
# Map of address to address purpose
self.purpose = defaultdict(lambda: "receive")
def add_address(self, address):
assert_equal(address not in self.addresses, True)
self.addresses.append(address)
def add_receive_address(self, address):
self.add_address(address)
def verify(self, node):
if self.receive_address is not None:
assert self.receive_address in self.addresses
for address in self.addresses:
test_address(node, address, labels=[self.name])
assert self.name in node.listlabels()
assert_equal(
node.getaddressesbylabel(self.name),
{address: {"purpose": self.purpose[address]} for address in self.addresses})
def change_label(node, address, old_label, new_label):
assert_equal(address in old_label.addresses, True)
node.setlabel(address, new_label.name)
old_label.addresses.remove(address)
new_label.add_address(address)
old_label.verify(node)
new_label.verify(node)
if __name__ == '__main__':
WalletLabelsTest().main()
|
jasonbot/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex/7_auto.py
|
770
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("migrations", "6_auto")]
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
guewen/OpenUpgrade
|
refs/heads/master
|
addons/product_expiry/__openerp__.py
|
61
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Products Expiry Date',
'version' : '1.0',
'author' : 'OpenERP SA',
'category' : 'Specific Industry Applications',
'depends' : ['stock'],
'demo' : ['product_expiry_demo.xml'],
'description': """
Track different dates on products and production lots.
======================================================
Following dates can be tracked:
-------------------------------
- end of life
- best before date
- removal date
- alert date
Also implements the First Expiry First Out (FEFO) removal strategy, widely used, for example, in the food industry.
""",
'data' : ['product_expiry_view.xml', 'product_expiry_data.xml'],
'auto_install': False,
'installable': True,
'images': ['images/production_lots_dates.jpeg','images/products_dates.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
refuge-attic/bigcouch
|
refs/heads/master
|
couchjs/scons/scons-local-2.0.1/SCons/Tool/MSCommon/vc.py
|
53
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# TODO:
# * supported arch for versions: for old versions of batch file without
# argument, giving bogus argument cannot be detected, so we have to hardcode
# this here
# * print warning when msvc version specified but not found
# * find out why warnings do not print
# * test on 64 bits XP + VS 2005 (and VS 6 if possible)
# * SDK
# * Assembly
__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py 5134 2010/08/16 23:02:40 bdeegan"
__doc__ = """Module for Visual C/C++ detection and configuration.
"""
import SCons.compat
import os
import platform
from string import digits as string_digits
import SCons.Warnings
import common
debug = common.debug
import sdk
get_installed_sdks = sdk.get_installed_sdks
class VisualCException(Exception):
pass
class UnsupportedVersion(VisualCException):
pass
class UnsupportedArch(VisualCException):
pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# Dict to canonicalize the arch
_ARCH_TO_CANONICAL = {
"amd64" : "amd64",
"emt64" : "amd64",
"i386" : "x86",
"i486" : "x86",
"i586" : "x86",
"i686" : "x86",
"ia64" : "ia64",
"itanium" : "ia64",
"x86" : "x86",
"x86_64" : "amd64",
}
# Given a (host, target) tuple, return the argument for the bat file. Both host
# and target should be canonicalized.
_HOST_TARGET_ARCH_TO_BAT_ARCH = {
("x86", "x86"): "x86",
("x86", "amd64"): "x86_amd64",
("amd64", "amd64"): "amd64",
("amd64", "x86"): "x86",
("x86", "ia64"): "x86_ia64"
}
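# Illustrative lookup: building 64 bit binaries from a 32 bit host maps
# _HOST_TARGET_ARCH_TO_BAT_ARCH[("x86", "amd64")] to "x86_amd64", which is
# the argument later handed to the located batch script (vcvarsall.bat).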
def get_host_target(env):
debug('vc.py:get_host_target()')
host_platform = env.get('HOST_ARCH')
if not host_platform:
host_platform = platform.machine()
# TODO(2.5): the native Python platform.machine() function returns
# '' on all Python versions before 2.6, after which it also uses
# PROCESSOR_ARCHITECTURE.
if not host_platform:
host_platform = os.environ.get('PROCESSOR_ARCHITECTURE', '')
# Retain user requested TARGET_ARCH
req_target_platform = env.get('TARGET_ARCH')
debug('vc.py:get_host_target() req_target_platform:%s'%req_target_platform)
if req_target_platform:
# If user requested a specific platform then only try that one.
target_platform = req_target_platform
else:
target_platform = host_platform
try:
host = _ARCH_TO_CANONICAL[host_platform.lower()]
except KeyError, e:
msg = "Unrecognized host architecture %s"
raise ValueError(msg % repr(host_platform))
try:
target = _ARCH_TO_CANONICAL[target_platform.lower()]
except KeyError, e:
raise ValueError("Unrecognized target architecture %s" % target_platform)
return (host, target,req_target_platform)
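# Illustrative result: on a 64 bit host with no TARGET_ARCH requested,
# get_host_target(env) would return ("amd64", "amd64", None) -- host and
# target canonicalized, None recording that no explicit target was asked for.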
_VCVER = ["10.0", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"]
_VCVER_TO_PRODUCT_DIR = {
'10.0': [
r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'],
'9.0': [
r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir'],
'9.0Exp' : [
r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'],
'8.0': [
r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir'],
'8.0Exp': [
r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'],
'7.1': [
r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'],
'7.0': [
r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'],
'6.0': [
r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir']
}
def msvc_version_to_maj_min(msvc_version):
msvc_version_numeric = ''.join([x for x in msvc_version if x in string_digits + '.'])
t = msvc_version_numeric.split(".")
if not len(t) == 2:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
try:
maj = int(t[0])
min = int(t[1])
return maj, min
except ValueError, e:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
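# Illustrative: msvc_version_to_maj_min("9.0Exp") returns (9, 0); the
# non-numeric "Exp" suffix is filtered out before splitting on the dot.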
def is_host_target_supported(host_target, msvc_version):
"""Return True if the given (host, target) tuple is supported given the
msvc version.
Parameters
----------
host_target: tuple
        tuple of (canonicalized) host-target, e.g. ("x86", "amd64") for cross
compilation from 32 bits windows to 64 bits.
msvc_version: str
msvc version (major.minor, e.g. 10.0)
Note
----
    This only checks whether a given version *may* support the given (host,
target), not that the toolchain is actually present on the machine.
"""
# We assume that any Visual Studio version supports x86 as a target
if host_target[1] != "x86":
maj, min = msvc_version_to_maj_min(msvc_version)
if maj < 8:
return False
return True
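# Illustrative: is_host_target_supported(("x86", "amd64"), "7.1") is False,
# while the same tuple with "8.0" is True -- only VS >= 8 is assumed to ship
# toolchains for non-x86 targets.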
def find_vc_pdir(msvc_version):
"""Try to find the product directory for the given
version.
Note
----
If for some reason the requested version could not be found, an
exception which inherits from VisualCException will be raised."""
root = 'Software\\'
if common.is_win64():
root = root + 'Wow6432Node\\'
try:
hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
except KeyError:
debug("Unknown version of MSVC: %s" % msvc_version)
raise UnsupportedVersion("Unknown version %s" % msvc_version)
for key in hkeys:
key = root + key
try:
comps = common.read_reg(key)
except WindowsError, e:
debug('find_vc_dir(): no VC registry key %s' % repr(key))
else:
debug('find_vc_dir(): found VC in registry: %s' % comps)
if os.path.exists(comps):
return comps
else:
debug('find_vc_dir(): reg says dir is %s, but it does not exist. (ignoring)'\
% comps)
raise MissingConfiguration("registry dir %s not found on the filesystem" % comps)
return None
def find_batch_file(env,msvc_version,host_arch,target_arch):
"""
Find the location of the batch script which should set up the compiler
for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
"""
pdir = find_vc_pdir(msvc_version)
if pdir is None:
raise NoVersionFound("No version of Visual Studio found")
debug('vc.py: find_batch_file() pdir:%s'%pdir)
# filter out e.g. "Exp" from the version name
msvc_ver_numeric = ''.join([x for x in msvc_version if x in string_digits + "."])
vernum = float(msvc_ver_numeric)
if 7 <= vernum < 8:
pdir = os.path.join(pdir, os.pardir, "Common7", "Tools")
batfilename = os.path.join(pdir, "vsvars32.bat")
elif vernum < 7:
pdir = os.path.join(pdir, "Bin")
batfilename = os.path.join(pdir, "vcvars32.bat")
else: # >= 8
batfilename = os.path.join(pdir, "vcvarsall.bat")
if not os.path.exists(batfilename):
debug("Not found: %s" % batfilename)
batfilename = None
installed_sdks=get_installed_sdks()
for _sdk in installed_sdks:
sdk_bat_file=_sdk.get_sdk_vc_script(host_arch,target_arch)
sdk_bat_file_path=os.path.join(pdir,sdk_bat_file)
debug('vc.py:find_batch_file() sdk_bat_file_path:%s'%sdk_bat_file_path)
if os.path.exists(sdk_bat_file_path):
return (batfilename,sdk_bat_file_path)
else:
debug("vc.py:find_batch_file() not found:%s"%sdk_bat_file_path)
else:
return (batfilename,None)
__INSTALLED_VCS_RUN = None
def cached_get_installed_vcs():
global __INSTALLED_VCS_RUN
if __INSTALLED_VCS_RUN is None:
ret = get_installed_vcs()
__INSTALLED_VCS_RUN = ret
return __INSTALLED_VCS_RUN
def get_installed_vcs():
installed_versions = []
for ver in _VCVER:
debug('trying to find VC %s' % ver)
try:
if find_vc_pdir(ver):
debug('found VC %s' % ver)
installed_versions.append(ver)
else:
debug('find_vc_pdir return None for ver %s' % ver)
except VisualCException, e:
debug('did not find VC %s: caught exception %s' % (ver, str(e)))
return installed_versions
def reset_installed_vcs():
    """Make it try again to find VC. This is just for the tests."""
    global __INSTALLED_VCS_RUN
    __INSTALLED_VCS_RUN = None
def script_env(script, args=None):
stdout = common.get_output(script, args)
    # Stupid batch files do not set a return code: we take a look at the
# beginning of the output for an error message instead
olines = stdout.splitlines()
if olines[0].startswith("The specified configuration type is missing"):
raise BatchFileExecutionError("\n".join(olines[:2]))
return common.parse_output(stdout)
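# Illustrative call (hypothetical path): script_env(r"C:\VS\VC\vcvarsall.bat",
# args="x86") runs the batch file and returns the environment variables it
# sets (typically PATH, INCLUDE, LIB, ...) as parsed by common.parse_output().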
def get_default_version(env):
debug('get_default_version()')
msvc_version = env.get('MSVC_VERSION')
msvs_version = env.get('MSVS_VERSION')
debug('get_default_version(): msvc_version:%s msvs_version:%s'%(msvc_version,msvs_version))
if msvs_version and not msvc_version:
SCons.Warnings.warn(
SCons.Warnings.DeprecatedWarning,
"MSVS_VERSION is deprecated: please use MSVC_VERSION instead ")
return msvs_version
elif msvc_version and msvs_version:
if not msvc_version == msvs_version:
SCons.Warnings.warn(
SCons.Warnings.VisualVersionMismatch,
"Requested msvc version (%s) and msvs version (%s) do " \
"not match: please use MSVC_VERSION only to request a " \
"visual studio version, MSVS_VERSION is deprecated" \
% (msvc_version, msvs_version))
return msvs_version
if not msvc_version:
installed_vcs = cached_get_installed_vcs()
debug('installed_vcs:%s' % installed_vcs)
if not installed_vcs:
msg = 'No installed VCs'
debug('msv %s\n' % repr(msg))
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg)
return None
msvc_version = installed_vcs[0]
debug('msvc_setup_env: using default installed MSVC version %s\n' % repr(msvc_version))
return msvc_version
def msvc_setup_env_once(env):
try:
has_run = env["MSVC_SETUP_RUN"]
except KeyError:
has_run = False
if not has_run:
msvc_setup_env(env)
env["MSVC_SETUP_RUN"] = True
def msvc_find_valid_batch_script(env,version):
debug('vc.py:msvc_find_valid_batch_script()')
# Find the host platform, target platform, and if present the requested
# target platform
(host_platform, target_platform,req_target_platform) = get_host_target(env)
    # If the user hasn't specifically requested a TARGET_ARCH and the
    # TARGET_ARCH is amd64, then also try 32 bits if there are no viable
    # 64 bit tools installed
try_target_archs = [target_platform]
if not req_target_platform and target_platform=='amd64':
try_target_archs.append('x86')
d = None
for tp in try_target_archs:
# Set to current arch.
env['TARGET_ARCH']=tp
debug("vc.py:msvc_find_valid_batch_script() trying target_platform:%s"%tp)
host_target = (host_platform, tp)
if not is_host_target_supported(host_target, version):
warn_msg = "host, target = %s not supported for MSVC version %s" % \
(host_target, version)
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target]
# Try to locate a batch file for this host/target platform combo
try:
(vc_script,sdk_script) = find_batch_file(env,version,host_platform,tp)
debug('vc.py:msvc_find_valid_batch_script() vc_script:%s sdk_script:%s'%(vc_script,sdk_script))
except VisualCException, e:
msg = str(e)
debug('Caught exception while looking for batch file (%s)' % msg)
warn_msg = "VC version %s not installed. " + \
"C/C++ compilers are most likely not set correctly.\n" + \
" Installed versions are: %s"
warn_msg = warn_msg % (version, cached_get_installed_vcs())
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
continue
# Try to use the located batch file for this host/target platform combo
debug('vc.py:msvc_find_valid_batch_script() use_script 2 %s, args:%s\n' % (repr(vc_script), arg))
if vc_script:
try:
d = script_env(vc_script, args=arg)
except BatchFileExecutionError, e:
debug('vc.py:msvc_find_valid_batch_script() use_script 3: failed running VC script %s: %s: Error:%s'%(repr(vc_script),arg,e))
vc_script=None
if not vc_script and sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 4: trying sdk script: %s'%(sdk_script))
try:
d = script_env(sdk_script,args=[])
except BatchFileExecutionError,e:
debug('vc.py:msvc_find_valid_batch_script() use_script 5: failed running SDK script %s: Error:%s'%(repr(sdk_script),e))
continue
elif not vc_script and not sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 6: Neither VC script nor SDK script found')
continue
    # If we cannot find a viable installed compiler, reset the TARGET_ARCH
    # to its initial value
if not d:
env['TARGET_ARCH']=req_target_platform
return d
def msvc_setup_env(env):
debug('msvc_setup_env()')
version = get_default_version(env)
if version is None:
warn_msg = "No version of Visual Studio compiler found - C/C++ " \
"compilers most likely not set correctly"
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
debug('msvc_setup_env: using specified MSVC version %s\n' % repr(version))
# XXX: we set-up both MSVS version for backward
# compatibility with the msvs tool
env['MSVC_VERSION'] = version
env['MSVS_VERSION'] = version
env['MSVS'] = {}
use_script = env.get('MSVC_USE_SCRIPT', True)
if SCons.Util.is_String(use_script):
debug('vc.py:msvc_setup_env() use_script 1 %s\n' % repr(use_script))
d = script_env(use_script)
elif use_script:
d = msvc_find_valid_batch_script(env,version)
debug('vc.py:msvc_setup_env() use_script 2 %s\n' % d)
if not d:
return d
else:
debug('MSVC_USE_SCRIPT set to False')
warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \
"set correctly."
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
for k, v in d.items():
debug('vc.py:msvc_setup_env() env:%s -> %s'%(k,v))
env.PrependENVPath(k, v, delete_existing=True)
def msvc_exists(version=None):
vcs = cached_get_installed_vcs()
if version is None:
return len(vcs) > 0
return version in vcs
|
asen6/amartyasenguptadotcom
|
refs/heads/master
|
django/contrib/gis/geos/io.py
|
623
|
"""
Module that holds classes for performing I/O operations on GEOS geometry
objects. Specifically, this has Python implementations of WKB/WKT
reader and writer classes.
"""
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.prototypes.io import _WKTReader, _WKBReader, WKBWriter, WKTWriter
# Public classes for (WKB|WKT)Reader, which return GEOSGeometry
class WKBReader(_WKBReader):
def read(self, wkb):
"Returns a GEOSGeometry for the given WKB buffer."
return GEOSGeometry(super(WKBReader, self).read(wkb))
class WKTReader(_WKTReader):
def read(self, wkt):
"Returns a GEOSGeometry for the given WKT string."
return GEOSGeometry(super(WKTReader, self).read(wkt))
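# Usage sketch (assuming a working GEOS install):
#   wkt_r = WKTReader()
#   pnt = wkt_r.read('POINT(5 23)')   # returns a GEOSGeometry
#   WKTWriter().write(pnt)            # back to a WKT string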
|
csrg-utfsm/acscb
|
refs/heads/master
|
LGPL/CommonSoftware/acspy/test/test_Acspy_Nc_CDBProperties.py
|
4
|
#! /usr/bin/env python
#******************************************************************************
# ALMA - Atacama Large Millimiter Array
# (c) Associated Universities Inc., 2010
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# "@(#) $Id: test_Acspy_Nc_CDBProperties.py,v 1.2 2010/03/20 22:46:40 agrimstrup Exp $"
#
# who when what
# -------- -------- ----------------------------------------------
# agrimstrup 2010-02-05 created
#
#------------------------------------------------------------------------------
__revision__ = "$Id: test_Acspy_Nc_CDBProperties.py,v 1.2 2010/03/20 22:46:40 agrimstrup Exp $"
#--REGULAR IMPORTS-------------------------------------------------------------
import unittest
import mock
import CORBA
import CosNotification
import Acspy.Nc.CDBProperties as CDBP
class TestBase(unittest.TestCase):
def setUp(self):
CDBP._cdb_access = mock.Mock(spec=CDBP.CDBaccess)
def tearDown(self):
CDBP._cdb_access = None
def set_element_return_value(self, value):
CDBP._cdb_access.getElement.return_value = value
def set_element_exception(self, exception):
def raiser():
raise exception()
CDBP._cdb_access.getElement.side_effect = raiser
def set_field_return_value(self, value):
CDBP._cdb_access.getField.return_value = value
def set_field_exception(self, exception):
def raiser():
raise exception()
CDBP._cdb_access.getField.side_effect = raiser
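    # Illustrative flow: setUp() swaps the module-level CDB accessor for a
    # mock, and tests steer it via these helpers, e.g.
    #   self.set_element_return_value(['Stuff'])
    #   CDBP.get_notification_service_mapping('Channel')   # -> ['Stuff']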
class TestGetNotificationServiceMapping(TestBase):
def test_ok(self):
self.set_element_return_value(['Stuff'])
self.assertEqual(['Stuff'],
CDBP.get_notification_service_mapping('Channel'))
def test_exception_handling(self):
self.set_element_exception(Exception)
self.assertEqual([], CDBP.get_notification_service_mapping('Channel'))
class TestChannelConfigExists(TestBase):
def test_ok(self):
self.set_field_return_value(['Stuff'])
self.assertEqual(1, CDBP.cdb_channel_config_exists('Channel'))
def test_exception_handling(self):
self.set_field_exception(Exception)
self.assertEqual(0, CDBP.cdb_channel_config_exists('Channel'))
class TestGetIntegrationLogs(TestBase):
def test_log_exists(self):
CDBP.INTEGRATION_LOGS = { 'Key':True }
self.assertEqual(True, CDBP.get_integration_logs('Key'))
CDBP.INTEGRATION_LOGS = {}
def test_no_channel(self):
self.set_field_exception(Exception)
self.assertEqual(0, CDBP.get_integration_logs('Key'))
def test_channel_false(self):
self.set_element_return_value([{"IntegrationLogs":"false"}])
self.set_field_return_value(True)
self.assertEqual(0, CDBP.get_integration_logs('Key'))
CDBP.INTEGRATION_LOGS = {}
def test_channel_true(self):
self.set_element_return_value([{"IntegrationLogs":"true"}])
self.set_field_return_value(True)
self.assertEqual(1, CDBP.get_integration_logs('Key'))
CDBP.INTEGRATION_LOGS = {}
def test_channel_unknown(self):
self.set_element_return_value([{"IntegrationLogs":"frob"}])
self.set_field_return_value(True)
self.assertEqual(0, CDBP.get_integration_logs('Key'))
CDBP.INTEGRATION_LOGS = {}
class TestGetChannelQofSProps(TestBase):
def test_qofs_start_false_stop_false(self):
self.set_element_return_value([{"DiscardPolicy":"AnyOrder",
"EventReliability":"BestEffort",
"ConnectionReliability":"BestEffort",
"Priority":"0",
"Timeout":"0",
"OrderPolicy":"AnyOrder",
"StartTimeSupported":"false",
"StopTimeSupported":"false",
"MaxEventsPerConsumer":"0"}])
self.set_field_return_value(True)
self.assertEqual(5,
len(CDBP.get_channel_qofs_props('Key')))
def test_qofs_start_true_stop_true(self):
self.set_element_return_value([{"DiscardPolicy":"AnyOrder",
"EventReliability":"BestEffort",
"ConnectionReliability":"BestEffort",
"Priority":"0",
"Timeout":"0",
"OrderPolicy":"AnyOrder",
"StartTimeSupported":"true",
"StopTimeSupported":"true",
"MaxEventsPerConsumer":"0"}])
self.set_field_return_value(True)
self.assertEqual(5,
len(CDBP.get_channel_qofs_props('Key')))
def test_empty_config(self):
self.set_element_return_value([{}])
self.set_field_return_value(True)
self.assertRaises(KeyError,
CDBP.get_channel_qofs_props,
'Key')
class TestGetChannelAdminProps(TestBase):
def test_reject_false(self):
self.set_element_return_value([{"MaxQueueLength":"0",
"MaxConsumers":"0",
"MaxSuppliers":"0",
"RejectNewEvents":"false"}])
self.set_field_return_value(True)
self.assertEqual(4,
len(CDBP.get_channel_admin_props('Key')))
def test_reject_true(self):
self.set_element_return_value([{"MaxQueueLength":"0",
"MaxConsumers":"0",
"MaxSuppliers":"0",
"RejectNewEvents":"true"}])
self.set_field_return_value(True)
self.assertEqual(4,
len(CDBP.get_channel_admin_props('Key')))
def test_empty_config(self):
self.set_element_return_value([{}])
self.set_field_return_value(True)
self.assertRaises(KeyError,
CDBP.get_channel_admin_props,
'Key')
class TestGetEventHandlerTimeoutDict(TestBase):
def test_no_channel(self):
self.set_field_exception(Exception)
self.assertEqual({}, CDBP.getEventHandlerTimeoutDict('Key'))
def test_no_events(self):
self.set_field_return_value( \
'<?xml version="1.0" encoding="ISO-8859-1"?>' \
'<EventChannel xmlns="urn:schemas-cosylab-com:EventChannel:1.0"' \
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' \
' IntegrationLogs="true">' \
'</EventChannel>')
self.assertEqual({}, CDBP.getEventHandlerTimeoutDict('Key'))
def test_events(self):
self.set_field_return_value( \
'<?xml version="1.0" encoding="ISO-8859-1"?>' \
'<EventChannel xmlns="urn:schemas-cosylab-com:EventChannel:1.0"' \
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' \
' IntegrationLogs="true">' \
' <Events>' \
' <_ Name="EventDescription" MaxProcessTime="1.25" />' \
' </Events>' \
'</EventChannel>')
self.assertEqual({'EventDescription': 1.25},
CDBP.getEventHandlerTimeoutDict('Key'))
if __name__ == "__main__":
unittest.main()
#
# ___oOo___
|
lfairchild/PmagPy
|
refs/heads/master
|
programs/pt_rot.py
|
2
|
#!/usr/bin/env python
# define some variables
from __future__ import print_function
import sys
import matplotlib
if matplotlib.get_backend() != "TKAgg":
matplotlib.use("TKAgg")
import pmagpy.pmag as pmag
import pmagpy.frp as frp
def main():
"""
NAME
pt_rot.py
DESCRIPTION
rotates pt according to specified age and plate
SYNTAX
pt_rot.py [command line options]
OPTIONS
-h prints help and quits
-f file with lon lat plate age Dplate as space delimited input
       Dplate is the destination plate coordinates desired
- default is "fixed south africa"
Dplate should be one of: [nwaf, neaf,saf,aus, eur, ind, sam, ant, grn, nam]
-ff file Efile, file has lat lon data file and Efile has sequential rotation poles: Elat Elon Omega
-F OFILE, output sites (pmag_results) formatted file with rotated points stored in pole_lon, pole_lat (vgp_lon, vgp_lat). (data_model=2.5)
default is to print out rotated lon, lat to standard output
-dm [2.5,3] set data model for output. Default is 3
"""
dir_path='.'
PTS=[]
ResRecs=[]
ofile=""
data_model=3
Dplates=['nwaf', 'neaf','saf','aus', 'eur', 'ind', 'sam', 'ant', 'grn', 'nam']
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile=dir_path+'/'+sys.argv[ind+1]
    if '-dm' in sys.argv:
        ind = sys.argv.index('-dm')
        data_model=float(sys.argv[ind+1]) # the -dm value is a number (2.5 or 3), not a path
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
elif '-ff' in sys.argv:
ind = sys.argv.index('-ff')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
Efile=dir_path+'/'+sys.argv[ind+2]
f=open(Efile,'r')
edata=f.readlines()
Poles=[]
for p in edata:
rec=p.split()
pole=[float(rec[0]),float(rec[1]),float(rec[2])] # pole is lat/lon/omega
Poles.append(pole)
else:
data=sys.stdin.readlines()
polelatkey,polelonkey='pole_lat','pole_lon'
if data_model!=3:
polelatkey,polelonkey='vgp_lat','vgp_lon'
for line in data:
PtRec={}
rec=line.split()
PtRec['site_lon']=rec[0]
PtRec['site_lat']=rec[1]
if '-ff' in sys.argv:
pt_lat,pt_lon=float(rec[1]),float(rec[0])
for pole in Poles:
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
PtRec['cont']=rec[2]
if PtRec['cont']=='af':PtRec['cont']='saf' # use fixed south africa
PtRec['age']=rec[3]
if len(rec)>4:
PtRec['dcont']=rec[4]
PTS.append(PtRec)
if '-ff' not in sys.argv:
for pt in PTS:
pole='not specified'
pt_lat=float(pt['site_lat'])
pt_lon=float(pt['site_lon'])
age=float(pt['age'])
ptrot=[[pt_lat],[pt_lon]]
if pt['cont']=='ib':
pole=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pt['cont']='eur'
if pt['cont']!='saf':
pole1=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole1,[pt_lat],[pt_lon])
if 'dcont' in list(pt.keys()):
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if 'dcont' in list(pt.keys()):
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
print(ptrot)
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
if len(ResRecs)>0:
if data_model==3:
pmag.magic_write(ofile,ResRecs,'locations')
else:
pmag.magic_write(ofile,ResRecs,'pmag_results')
if __name__ == "__main__":
main()
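# Illustrative run (hypothetical values): echoing "30.0 40.0 eur 100" into
# pt_rot.py rotates the point (lon 30, lat 40) on the Eurasian plate at
# 100 Ma into fixed-South-Africa coordinates and prints the rotated lon, lat.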
|
etataurov/pytest
|
refs/heads/master
|
testing/test_junitxml.py
|
4
|
# -*- coding: utf-8 -*-
from xml.dom import minidom
import py
import sys
import os
from _pytest.junitxml import LogXML
import pytest
def runandparse(testdir, *args):
resultpath = testdir.tmpdir.join("junit.xml")
result = testdir.runpytest("--junitxml=%s" % resultpath, *args)
xmldoc = minidom.parse(str(resultpath))
return result, DomNode(xmldoc)
def assert_attr(node, **kwargs):
__tracebackhide__ = True
def nodeval(node, name):
anode = node.getAttributeNode(name)
if anode is not None:
return anode.value
expected = dict((name, str(value)) for name, value in kwargs.items())
on_node = dict((name, nodeval(node, name)) for name in expected)
assert on_node == expected
class DomNode(object):
def __init__(self, dom):
self.__node = dom
def __repr__(self):
return self.__node.toxml()
def find_first_by_tag(self, tag):
return self.find_nth_by_tag(tag, 0)
def _by_tag(self, tag):
return self.__node.getElementsByTagName(tag)
def find_nth_by_tag(self, tag, n):
items = self._by_tag(tag)
try:
nth = items[n]
except IndexError:
pass
else:
return type(self)(nth)
def find_by_tag(self, tag):
t = type(self)
return [t(x) for x in self.__node.getElementsByTagName(tag)]
def __getitem__(self, key):
node = self.__node.getAttributeNode(key)
if node is not None:
return node.value
def assert_attr(self, **kwargs):
__tracebackhide__ = True
return assert_attr(self.__node, **kwargs)
def toxml(self):
return self.__node.toxml()
@property
def text(self):
return self.__node.childNodes[0].wholeText
@property
def tag(self):
return self.__node.tagName
@property
    def next_sibling(self):
        return type(self)(self.__node.nextSibling)
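# DomNode usage sketch (illustrative): given the DOM parsed from junit.xml,
#   suite = dom.find_first_by_tag("testsuite")
#   suite.assert_attr(tests=5)                 # compares XML attributes
#   case = suite.find_first_by_tag("testcase")
# mirrors the access pattern used throughout the tests below.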
class TestPython:
def test_summing_simple(self, testdir):
testdir.makepyfile("""
import pytest
def test_pass():
pass
def test_fail():
assert 0
def test_skip():
pytest.skip("")
@pytest.mark.xfail
def test_xfail():
assert 0
@pytest.mark.xfail
def test_xpass():
assert 1
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(name="pytest", errors=0, failures=1, skips=2, tests=5)
def test_summing_simple_with_errors(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.fixture
def fixture():
raise Exception()
def test_pass():
pass
def test_fail():
assert 0
def test_error(fixture):
pass
@pytest.mark.xfail
def test_xfail():
assert False
@pytest.mark.xfail(strict=True)
def test_xpass():
assert True
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(name="pytest", errors=1, failures=2, skips=1, tests=5)
def test_timing_function(self, testdir):
testdir.makepyfile("""
import time, pytest
def setup_module():
time.sleep(0.01)
def teardown_module():
time.sleep(0.01)
def test_sleep():
time.sleep(0.01)
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
tnode = node.find_first_by_tag("testcase")
val = tnode["time"]
assert round(float(val), 2) >= 0.03
def test_setup_error(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.fixture
def arg(request):
raise ValueError()
def test_function(arg):
pass
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(errors=1, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_setup_error.py",
line="5",
classname="test_setup_error",
name="test_function")
fnode = tnode.find_first_by_tag("error")
fnode.assert_attr(message="test setup failure")
assert "ValueError" in fnode.toxml()
def test_teardown_error(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.fixture
def arg():
yield
raise ValueError()
def test_function(arg):
pass
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_teardown_error.py",
line="6",
classname="test_teardown_error",
name="test_function")
fnode = tnode.find_first_by_tag("error")
fnode.assert_attr(message="test teardown failure")
assert "ValueError" in fnode.toxml()
def test_skip_contains_name_reason(self, testdir):
testdir.makepyfile("""
import pytest
def test_skip():
pytest.skip("hello23")
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_skip_contains_name_reason.py",
line="1",
classname="test_skip_contains_name_reason",
name="test_skip")
snode = tnode.find_first_by_tag("skipped")
snode.assert_attr(type="pytest.skip", message="hello23", )
def test_mark_skip_contains_name_reason(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip(reason="hello24")
def test_skip():
assert True
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_mark_skip_contains_name_reason.py",
line="1",
classname="test_mark_skip_contains_name_reason",
name="test_skip")
snode = tnode.find_first_by_tag("skipped")
snode.assert_attr(type="pytest.skip", message="hello24", )
def test_mark_skipif_contains_name_reason(self, testdir):
testdir.makepyfile("""
import pytest
GLOBAL_CONDITION = True
@pytest.mark.skipif(GLOBAL_CONDITION, reason="hello25")
def test_skip():
assert True
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_mark_skipif_contains_name_reason.py",
line="2",
classname="test_mark_skipif_contains_name_reason",
name="test_skip")
snode = tnode.find_first_by_tag("skipped")
snode.assert_attr(type="pytest.skip", message="hello25", )
def test_mark_skip_doesnt_capture_output(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip(reason="foo")
def test_skip():
print("bar!")
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node_xml = dom.find_first_by_tag("testsuite").toxml()
assert "bar!" not in node_xml
def test_classname_instance(self, testdir):
testdir.makepyfile("""
class TestClass:
def test_method(self):
assert 0
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(failures=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_classname_instance.py",
line="1",
classname="test_classname_instance.TestClass",
name="test_method")
def test_classname_nested_dir(self, testdir):
p = testdir.tmpdir.ensure("sub", "test_hello.py")
p.write("def test_func(): 0/0")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(failures=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file=os.path.join("sub", "test_hello.py"),
line="0",
classname="sub.test_hello",
name="test_func")
def test_internal_error(self, testdir):
testdir.makeconftest("def pytest_runtest_protocol(): 0 / 0")
testdir.makepyfile("def test_function(): pass")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(errors=1, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(classname="pytest", name="internal")
fnode = tnode.find_first_by_tag("error")
fnode.assert_attr(message="internal error")
assert "Division" in fnode.toxml()
def test_failure_function(self, testdir):
testdir.makepyfile("""
import sys
def test_fail():
print ("hello-stdout")
sys.stderr.write("hello-stderr\\n")
raise ValueError(42)
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(failures=1, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_failure_function.py",
line="1",
classname="test_failure_function",
name="test_fail")
fnode = tnode.find_first_by_tag("failure")
fnode.assert_attr(message="ValueError: 42")
assert "ValueError" in fnode.toxml()
        systemout = fnode.next_sibling
assert systemout.tag == "system-out"
assert "hello-stdout" in systemout.toxml()
        systemerr = systemout.next_sibling
assert systemerr.tag == "system-err"
assert "hello-stderr" in systemerr.toxml()
def test_failure_verbose_message(self, testdir):
testdir.makepyfile("""
import sys
def test_fail():
assert 0, "An error"
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
tnode = node.find_first_by_tag("testcase")
fnode = tnode.find_first_by_tag("failure")
fnode.assert_attr(message="AssertionError: An error assert 0")
def test_failure_escape(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.parametrize('arg1', "<&'", ids="<&'")
def test_func(arg1):
print(arg1)
assert 0
""")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(failures=3, tests=3)
for index, char in enumerate("<&'"):
tnode = node.find_nth_by_tag("testcase", index)
tnode.assert_attr(
file="test_failure_escape.py",
line="1",
classname="test_failure_escape",
name="test_func[%s]" % char)
sysout = tnode.find_first_by_tag('system-out')
text = sysout.text
assert text == '%s\n' % char
def test_junit_prefixing(self, testdir):
testdir.makepyfile("""
def test_func():
assert 0
class TestHello:
def test_hello(self):
pass
""")
result, dom = runandparse(testdir, "--junitprefix=xyz")
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(failures=1, tests=2)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_junit_prefixing.py",
line="0",
classname="xyz.test_junit_prefixing",
name="test_func")
tnode = node.find_nth_by_tag("testcase", 1)
tnode.assert_attr(
file="test_junit_prefixing.py",
line="3",
classname="xyz.test_junit_prefixing."
"TestHello",
name="test_hello")
def test_xfailure_function(self, testdir):
testdir.makepyfile("""
import pytest
def test_xfail():
pytest.xfail("42")
""")
result, dom = runandparse(testdir)
assert not result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=1, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_xfailure_function.py",
line="1",
classname="test_xfailure_function",
name="test_xfail")
fnode = tnode.find_first_by_tag("skipped")
fnode.assert_attr(message="expected test failure")
# assert "ValueError" in fnode.toxml()
def test_xfailure_xpass(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.xfail
def test_xpass():
pass
""")
result, dom = runandparse(testdir)
# assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=0, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_xfailure_xpass.py",
line="1",
classname="test_xfailure_xpass",
name="test_xpass")
def test_xfailure_xpass_strict(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.xfail(strict=True, reason="This needs to fail!")
def test_xpass():
pass
""")
result, dom = runandparse(testdir)
# assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(skips=0, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_xfailure_xpass_strict.py",
line="1",
classname="test_xfailure_xpass_strict",
name="test_xpass")
fnode = tnode.find_first_by_tag("failure")
fnode.assert_attr(message="[XPASS(strict)] This needs to fail!")
def test_collect_error(self, testdir):
testdir.makepyfile("syntax error")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(errors=1, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(
file="test_collect_error.py",
name="test_collect_error")
assert tnode["line"] is None
fnode = tnode.find_first_by_tag("error")
fnode.assert_attr(message="collection failure")
assert "SyntaxError" in fnode.toxml()
def test_unicode(self, testdir):
value = 'hx\xc4\x85\xc4\x87\n'
testdir.makepyfile("""
# coding: latin1
def test_hello():
print (%r)
assert 0
""" % value)
result, dom = runandparse(testdir)
assert result.ret == 1
tnode = dom.find_first_by_tag("testcase")
fnode = tnode.find_first_by_tag("failure")
if not sys.platform.startswith("java"):
assert "hx" in fnode.toxml()
def test_assertion_binchars(self, testdir):
"""this test did fail when the escaping wasnt strict"""
testdir.makepyfile("""
M1 = '\x01\x02\x03\x04'
M2 = '\x01\x02\x03\x05'
def test_str_compare():
assert M1 == M2
""")
result, dom = runandparse(testdir)
print(dom.toxml())
def test_pass_captures_stdout(self, testdir):
testdir.makepyfile("""
def test_pass():
print('hello-stdout')
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
pnode = node.find_first_by_tag("testcase")
systemout = pnode.find_first_by_tag("system-out")
assert "hello-stdout" in systemout.toxml()
def test_pass_captures_stderr(self, testdir):
testdir.makepyfile("""
import sys
def test_pass():
sys.stderr.write('hello-stderr')
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
pnode = node.find_first_by_tag("testcase")
systemout = pnode.find_first_by_tag("system-err")
assert "hello-stderr" in systemout.toxml()
def test_setup_error_captures_stdout(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.fixture
def arg(request):
print('hello-stdout')
raise ValueError()
def test_function(arg):
pass
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
pnode = node.find_first_by_tag("testcase")
systemout = pnode.find_first_by_tag("system-out")
assert "hello-stdout" in systemout.toxml()
def test_setup_error_captures_stderr(self, testdir):
testdir.makepyfile("""
import sys
import pytest
@pytest.fixture
def arg(request):
sys.stderr.write('hello-stderr')
raise ValueError()
def test_function(arg):
pass
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
pnode = node.find_first_by_tag("testcase")
systemout = pnode.find_first_by_tag("system-err")
assert "hello-stderr" in systemout.toxml()
def test_avoid_double_stdout(self, testdir):
testdir.makepyfile("""
import sys
import pytest
@pytest.fixture
def arg(request):
yield
sys.stdout.write('hello-stdout teardown')
raise ValueError()
def test_function(arg):
sys.stdout.write('hello-stdout call')
""")
result, dom = runandparse(testdir)
node = dom.find_first_by_tag("testsuite")
pnode = node.find_first_by_tag("testcase")
systemout = pnode.find_first_by_tag("system-out")
assert "hello-stdout call" in systemout.toxml()
assert "hello-stdout teardown" in systemout.toxml()
def test_mangle_test_address():
from _pytest.junitxml import mangle_test_address
address = '::'.join(
["a/my.py.thing.py", "Class", "()", "method", "[a-1-::]"])
newnames = mangle_test_address(address)
assert newnames == ["a.my.py.thing", "Class", "method", "[a-1-::]"]
def test_dont_configure_on_slaves(tmpdir):
gotten = []
class FakeConfig:
def __init__(self):
self.pluginmanager = self
self.option = self
junitprefix = None
        # XXX: shouldn't need tmpdir?
xmlpath = str(tmpdir.join('junix.xml'))
register = gotten.append
fake_config = FakeConfig()
from _pytest import junitxml
junitxml.pytest_configure(fake_config)
assert len(gotten) == 1
FakeConfig.slaveinput = None
junitxml.pytest_configure(fake_config)
assert len(gotten) == 1
class TestNonPython:
def test_summing_simple(self, testdir):
testdir.makeconftest("""
import pytest
def pytest_collect_file(path, parent):
if path.ext == ".xyz":
return MyItem(path, parent)
class MyItem(pytest.Item):
def __init__(self, path, parent):
super(MyItem, self).__init__(path.basename, parent)
self.fspath = path
def runtest(self):
raise ValueError(42)
def repr_failure(self, excinfo):
return "custom item runtest failed"
""")
testdir.tmpdir.join("myfile.xyz").write("hello")
result, dom = runandparse(testdir)
assert result.ret
node = dom.find_first_by_tag("testsuite")
node.assert_attr(errors=0, failures=1, skips=0, tests=1)
tnode = node.find_first_by_tag("testcase")
tnode.assert_attr(name="myfile.xyz")
fnode = tnode.find_first_by_tag("failure")
fnode.assert_attr(message="custom item runtest failed")
assert "custom item runtest failed" in fnode.toxml()
def test_nullbyte(testdir):
# A null byte can not occur in XML (see section 2.2 of the spec)
testdir.makepyfile("""
import sys
def test_print_nullbyte():
sys.stdout.write('Here the null -->' + chr(0) + '<--')
sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
assert False
""")
xmlf = testdir.tmpdir.join('junit.xml')
testdir.runpytest('--junitxml=%s' % xmlf)
text = xmlf.read()
assert '\x00' not in text
assert '#x00' in text
def test_nullbyte_replace(testdir):
# Check if the null byte gets replaced
testdir.makepyfile("""
import sys
def test_print_nullbyte():
sys.stdout.write('Here the null -->' + chr(0) + '<--')
sys.stdout.write('In repr form -->' + repr(chr(0)) + '<--')
assert False
""")
xmlf = testdir.tmpdir.join('junit.xml')
testdir.runpytest('--junitxml=%s' % xmlf)
text = xmlf.read()
assert '#x0' in text
def test_invalid_xml_escape():
    # Test some more invalid xml chars; the full range should really be
    # tested, but let's just test the edges of the ranges instead.
# XXX This only tests low unicode character points for now as
# there are some issues with the testing infrastructure for
# the higher ones.
# XXX Testing 0xD (\r) is tricky as it overwrites the just written
# line in the output, so we skip it too.
global unichr
try:
unichr(65)
except NameError:
unichr = chr
invalid = (0x00, 0x1, 0xB, 0xC, 0xE, 0x19, 27, # issue #126
0xD800, 0xDFFF, 0xFFFE, 0x0FFFF) # , 0x110000)
valid = (0x9, 0xA, 0x20, )
# 0xD, 0xD7FF, 0xE000, 0xFFFD, 0x10000, 0x10FFFF)
from _pytest.junitxml import bin_xml_escape
for i in invalid:
got = bin_xml_escape(unichr(i)).uniobj
if i <= 0xFF:
expected = '#x%02X' % i
else:
expected = '#x%04X' % i
assert got == expected
for i in valid:
assert chr(i) == bin_xml_escape(unichr(i)).uniobj
def test_logxml_path_expansion(tmpdir, monkeypatch):
home_tilde = py.path.local(os.path.expanduser('~')).join('test.xml')
xml_tilde = LogXML('~%stest.xml' % tmpdir.sep, None)
assert xml_tilde.logfile == home_tilde
    # this is here for when $HOME is not set correctly
monkeypatch.setenv("HOME", tmpdir)
home_var = os.path.normpath(os.path.expandvars('$HOME/test.xml'))
xml_var = LogXML('$HOME%stest.xml' % tmpdir.sep, None)
assert xml_var.logfile == home_var
def test_logxml_changingdir(testdir):
testdir.makepyfile("""
def test_func():
import os
os.chdir("a")
""")
testdir.tmpdir.mkdir("a")
result = testdir.runpytest("--junitxml=a/x.xml")
assert result.ret == 0
assert testdir.tmpdir.join("a/x.xml").check()
def test_logxml_makedir(testdir):
"""--junitxml should automatically create directories for the xml file"""
testdir.makepyfile("""
def test_pass():
pass
""")
result = testdir.runpytest("--junitxml=path/to/results.xml")
assert result.ret == 0
assert testdir.tmpdir.join("path/to/results.xml").check()
def test_logxml_check_isdir(testdir):
"""Give an error if --junit-xml is a directory (#2089)"""
result = testdir.runpytest("--junit-xml=.")
result.stderr.fnmatch_lines(["*--junitxml must be a filename*"])
def test_escaped_parametrized_names_xml(testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.parametrize('char', [u"\\x00"])
def test_func(char):
assert char
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testcase")
node.assert_attr(name="test_func[\\x00]")
def test_double_colon_split_function_issue469(testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.parametrize('param', ["double::colon"])
def test_func(param):
pass
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testcase")
node.assert_attr(classname="test_double_colon_split_function_issue469")
node.assert_attr(name='test_func[double::colon]')
def test_double_colon_split_method_issue469(testdir):
testdir.makepyfile("""
import pytest
class TestClass:
@pytest.mark.parametrize('param', ["double::colon"])
def test_func(self, param):
pass
""")
result, dom = runandparse(testdir)
assert result.ret == 0
node = dom.find_first_by_tag("testcase")
node.assert_attr(
classname="test_double_colon_split_method_issue469.TestClass")
node.assert_attr(name='test_func[double::colon]')
def test_unicode_issue368(testdir):
path = testdir.tmpdir.join("test.xml")
log = LogXML(str(path), None)
ustr = py.builtin._totext("ВНИ!", "utf-8")
from _pytest.runner import BaseReport
class Report(BaseReport):
longrepr = ustr
sections = []
nodeid = "something"
location = 'tests/filename.py', 42, 'TestClass.method'
test_report = Report()
# hopefully this is not too brittle ...
log.pytest_sessionstart()
node_reporter = log._opentestcase(test_report)
node_reporter.append_failure(test_report)
node_reporter.append_collect_error(test_report)
node_reporter.append_collect_skipped(test_report)
node_reporter.append_error(test_report)
test_report.longrepr = "filename", 1, ustr
node_reporter.append_skipped(test_report)
test_report.longrepr = "filename", 1, "Skipped: 卡嘣嘣"
node_reporter.append_skipped(test_report)
test_report.wasxfail = ustr
node_reporter.append_skipped(test_report)
log.pytest_sessionfinish()
def test_record_property(testdir):
testdir.makepyfile("""
import pytest
@pytest.fixture
def other(record_xml_property):
record_xml_property("bar", 1)
def test_record(record_xml_property, other):
record_xml_property("foo", "<1");
""")
result, dom = runandparse(testdir, '-rw')
node = dom.find_first_by_tag("testsuite")
tnode = node.find_first_by_tag("testcase")
psnode = tnode.find_first_by_tag('properties')
pnodes = psnode.find_by_tag('property')
pnodes[0].assert_attr(name="bar", value="1")
pnodes[1].assert_attr(name="foo", value="<1")
result.stdout.fnmatch_lines('*C3*test_record_property.py*experimental*')
def test_record_property_same_name(testdir):
testdir.makepyfile("""
def test_record_with_same_name(record_xml_property):
record_xml_property("foo", "bar")
record_xml_property("foo", "baz")
""")
result, dom = runandparse(testdir, '-rw')
node = dom.find_first_by_tag("testsuite")
tnode = node.find_first_by_tag("testcase")
psnode = tnode.find_first_by_tag('properties')
pnodes = psnode.find_by_tag('property')
pnodes[0].assert_attr(name="foo", value="bar")
pnodes[1].assert_attr(name="foo", value="baz")
def test_random_report_log_xdist(testdir):
"""xdist calls pytest_runtest_logreport as they are executed by the slaves,
with nodes from several nodes overlapping, so junitxml must cope with that
to produce correct reports. #1064
"""
pytest.importorskip('xdist')
testdir.makepyfile("""
import pytest, time
@pytest.mark.parametrize('i', list(range(30)))
def test_x(i):
assert i != 22
""")
_, dom = runandparse(testdir, '-n2')
suite_node = dom.find_first_by_tag("testsuite")
failed = []
for case_node in suite_node.find_by_tag("testcase"):
if case_node.find_first_by_tag('failure'):
failed.append(case_node['name'])
assert failed == ['test_x[22]']
def test_runs_twice(testdir):
f = testdir.makepyfile('''
def test_pass():
pass
''')
result, dom = runandparse(testdir, f, f)
assert 'INTERNALERROR' not in result.stdout.str()
first, second = [x['classname'] for x in dom.find_by_tag("testcase")]
assert first == second
@pytest.mark.xfail(reason='hangs', run=False)
def test_runs_twice_xdist(testdir):
pytest.importorskip('xdist')
f = testdir.makepyfile('''
def test_pass():
pass
''')
result, dom = runandparse(
testdir, f,
'--dist', 'each', '--tx', '2*popen',)
assert 'INTERNALERROR' not in result.stdout.str()
first, second = [x['classname'] for x in dom.find_by_tag("testcase")]
assert first == second
def test_fancy_items_regression(testdir):
# issue 1259
testdir.makeconftest("""
import pytest
class FunItem(pytest.Item):
def runtest(self):
pass
class NoFunItem(pytest.Item):
def runtest(self):
pass
class FunCollector(pytest.File):
def collect(self):
return [
FunItem('a', self),
NoFunItem('a', self),
NoFunItem('b', self),
]
def pytest_collect_file(path, parent):
if path.check(ext='.py'):
return FunCollector(path, parent)
""")
testdir.makepyfile('''
def test_pass():
pass
''')
result, dom = runandparse(testdir)
assert 'INTERNALERROR' not in result.stdout.str()
items = sorted(
'%(classname)s %(name)s %(file)s' % x
for x in dom.find_by_tag("testcase"))
import pprint
pprint.pprint(items)
assert items == [
u'conftest a conftest.py',
u'conftest a conftest.py',
u'conftest b conftest.py',
u'test_fancy_items_regression a test_fancy_items_regression.py',
u'test_fancy_items_regression a test_fancy_items_regression.py',
u'test_fancy_items_regression b test_fancy_items_regression.py',
u'test_fancy_items_regression test_pass'
u' test_fancy_items_regression.py',
]
def test_global_properties(testdir):
path = testdir.tmpdir.join("test_global_properties.xml")
log = LogXML(str(path), None)
from _pytest.runner import BaseReport
class Report(BaseReport):
sections = []
nodeid = "test_node_id"
log.pytest_sessionstart()
log.add_global_property('foo', 1)
log.add_global_property('bar', 2)
log.pytest_sessionfinish()
dom = minidom.parse(str(path))
properties = dom.getElementsByTagName('properties')
assert (properties.length == 1), "There must be one <properties> node"
property_list = dom.getElementsByTagName('property')
    assert (property_list.length == 2), "There must be exactly 2 property nodes"
expected = {'foo': '1', 'bar': '2'}
actual = {}
for p in property_list:
k = str(p.getAttribute('name'))
v = str(p.getAttribute('value'))
actual[k] = v
assert actual == expected
|
Justin-Yuan/Image2Music-Generator
|
refs/heads/master
|
library/jython2.5.3/Lib/encodings/iso8859_5.py
|
593
|
""" Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-5',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0401' # 0xA1 -> CYRILLIC CAPITAL LETTER IO
u'\u0402' # 0xA2 -> CYRILLIC CAPITAL LETTER DJE
u'\u0403' # 0xA3 -> CYRILLIC CAPITAL LETTER GJE
u'\u0404' # 0xA4 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0405' # 0xA5 -> CYRILLIC CAPITAL LETTER DZE
u'\u0406' # 0xA6 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0407' # 0xA7 -> CYRILLIC CAPITAL LETTER YI
u'\u0408' # 0xA8 -> CYRILLIC CAPITAL LETTER JE
u'\u0409' # 0xA9 -> CYRILLIC CAPITAL LETTER LJE
u'\u040a' # 0xAA -> CYRILLIC CAPITAL LETTER NJE
u'\u040b' # 0xAB -> CYRILLIC CAPITAL LETTER TSHE
u'\u040c' # 0xAC -> CYRILLIC CAPITAL LETTER KJE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u040e' # 0xAE -> CYRILLIC CAPITAL LETTER SHORT U
u'\u040f' # 0xAF -> CYRILLIC CAPITAL LETTER DZHE
u'\u0410' # 0xB0 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0xB1 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0xB2 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0xB3 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0xB4 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0xB5 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0xB6 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0xB7 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0xB8 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0xB9 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0xBA -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0xBB -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0xBC -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0xBD -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0xBE -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0xBF -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0xC0 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0xC1 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0xC2 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0xC3 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0xC4 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0xC5 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0xC6 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0xC7 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0xC8 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0xC9 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0xCA -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0xCB -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0xCC -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0xCD -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0xCE -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0xCF -> CYRILLIC CAPITAL LETTER YA
u'\u0430' # 0xD0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xD1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xD2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xD3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xD4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xD5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xD7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xD8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xD9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xDA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xDB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xDC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xDD -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xDE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xDF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xE0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xE1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xE2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xE3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xE4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xE5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xE6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xE7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xE8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xE9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xEA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xEB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xEC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xED -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xEE -> CYRILLIC SMALL LETTER YU
u'\u044f' # 0xEF -> CYRILLIC SMALL LETTER YA
u'\u2116' # 0xF0 -> NUMERO SIGN
u'\u0451' # 0xF1 -> CYRILLIC SMALL LETTER IO
u'\u0452' # 0xF2 -> CYRILLIC SMALL LETTER DJE
u'\u0453' # 0xF3 -> CYRILLIC SMALL LETTER GJE
u'\u0454' # 0xF4 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0455' # 0xF5 -> CYRILLIC SMALL LETTER DZE
u'\u0456' # 0xF6 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0xF7 -> CYRILLIC SMALL LETTER YI
u'\u0458' # 0xF8 -> CYRILLIC SMALL LETTER JE
u'\u0459' # 0xF9 -> CYRILLIC SMALL LETTER LJE
u'\u045a' # 0xFA -> CYRILLIC SMALL LETTER NJE
u'\u045b' # 0xFB -> CYRILLIC SMALL LETTER TSHE
u'\u045c' # 0xFC -> CYRILLIC SMALL LETTER KJE
u'\xa7' # 0xFD -> SECTION SIGN
u'\u045e' # 0xFE -> CYRILLIC SMALL LETTER SHORT U
u'\u045f' # 0xFF -> CYRILLIC SMALL LETTER DZHE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
adaur/SickRage
|
refs/heads/master
|
tests/sickrage_tests/providers/nzb_provider_tests.py
|
4
|
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://SickRage.GitHub.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test NZBProvider
"""
from __future__ import print_function
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
import sickbeard
from generic_provider_tests import GenericProviderTests
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.providers.nzb.NZBProvider import NZBProvider
class NZBProviderTests(GenericProviderTests):
"""
Test NZBProvider
"""
def test___init__(self):
"""
Test __init__
"""
self.assertEqual(NZBProvider('Test Provider').provider_type, GenericProvider.NZB)
def test_is_active(self):
"""
Test is_active
"""
test_cases = {
(False, False): False,
(False, None): False,
(False, True): False,
(None, False): False,
(None, None): False,
(None, True): False,
(True, False): False,
(True, None): False,
(True, True): True,
}
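        # is_active should be True only when both USE_NZBS and the provider's
        # enabled flag are True, per the table above.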
for ((use_nzb, enabled), result) in test_cases.iteritems():
sickbeard.USE_NZBS = use_nzb
provider = NZBProvider('Test Provider')
provider.enabled = enabled
self.assertEqual(provider.is_active(), result)
def test__get_size(self):
"""
Test _get_size
"""
items_list = [
None, {}, {'links': None}, {'links': []}, {'links': [{}]},
{'links': [{'length': 1}, {'length': None}, {'length': 3}]},
{'links': [{'length': 1}, {'length': ''}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '0'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '123'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '12.3'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '-123'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '-12.3'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 0}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 123}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 12.3}, {'length': 3}]},
{'links': [{'length': 1}, {'length': -123}, {'length': 3}]},
{'links': [{'length': 1}, {'length': -12.3}, {'length': 3}]},
]
results_list = [
-1, -1, -1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12
]
unicode_items_list = [
{u'links': None}, {u'links': []}, {u'links': [{}]},
{u'links': [{u'length': 1}, {u'length': None}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u''}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'0'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'123'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'12.3'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'-123'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'-12.3'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 0}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 123}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 12.3}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': -123}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': -12.3}, {u'length': 3}]},
]
unicode_results_list = [
-1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12
]
self.assertEqual(
len(items_list), len(results_list),
'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
)
self.assertEqual(
len(unicode_items_list), len(unicode_results_list),
'Number of parameters (%d) and results (%d) does not match' % (
len(unicode_items_list), len(unicode_results_list))
)
for (index, item) in enumerate(items_list):
self.assertEqual(NZBProvider('Test Provider')._get_size(item), results_list[index])
for (index, item) in enumerate(unicode_items_list):
self.assertEqual(NZBProvider('Test Provider')._get_size(item), unicode_results_list[index])
def test__get_storage_dir(self):
"""
Test _get_storage_dir
"""
test_cases = [
None, 123, 12.3, '', os.path.join('some', 'path', 'to', 'folder')
]
for nzb_dir in test_cases:
sickbeard.NZB_DIR = nzb_dir
self.assertEqual(NZBProvider('Test Provider')._get_storage_dir(), nzb_dir)
if __name__ == '__main__':
print('=====> Testing %s' % __file__)
SUITE = unittest.TestLoader().loadTestsFromTestCase(NZBProviderTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
YathishReddy/Robust_ECN_Signalling_With_Nonces
|
refs/heads/master
|
src/tap-bridge/examples/tap-csma-virtual-machine.py
|
46
|
# -*- Mode: Python; -*-
#
# Copyright 2010 University of Washington
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import sys
import ns.core
import ns.csma
import ns.internet
import ns.network
import ns.tap_bridge
def main(argv):
ns.core.CommandLine().Parse(argv)
#
# We are interacting with the outside, real, world. This means we have to
# interact in real-time and therefore we have to use the real-time simulator
# and take the time to calculate checksums.
#
ns.core.GlobalValue.Bind("SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl"))
ns.core.GlobalValue.Bind("ChecksumEnabled", ns.core.BooleanValue("true"))
#
# Create two ghost nodes. The first will represent the virtual machine host
# on the left side of the network; and the second will represent the VM on
# the right side.
#
nodes = ns.network.NodeContainer()
nodes.Create (2)
#
# Use a CsmaHelper to get a CSMA channel created, and the needed net
# devices installed on both of the nodes. The data rate and delay for the
# channel can be set through the command-line parser.
#
csma = ns.csma.CsmaHelper()
devices = csma.Install(nodes)
#
# Use the TapBridgeHelper to connect to the pre-configured tap devices for
# the left side. We go with "UseLocal" mode since the wifi devices do not
    # support promiscuous mode (because of their nature). This is a special
# case mode that allows us to extend a linux bridge into ns-3 IFF we will
# only see traffic from one other device on that bridge. That is the case
# for this configuration.
#
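    # (The tap devices referenced below must already exist on the host; with
    # iproute2 they can be created with, e.g., "ip tuntap add dev tap-left mode tap".)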
tapBridge = ns.tap_bridge.TapBridgeHelper()
tapBridge.SetAttribute ("Mode", ns.core.StringValue ("UseLocal"))
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-left"))
tapBridge.Install (nodes.Get (0), devices.Get (0))
#
# Connect the right side tap to the right side wifi device on the right-side
# ghost node.
#
tapBridge.SetAttribute ("DeviceName", ns.core.StringValue ("tap-right"))
tapBridge.Install (nodes.Get (1), devices.Get (1))
#
# Run the simulation for ten minutes to give the user time to play around
#
ns.core.Simulator.Stop (ns.core.Seconds (600))
ns.core.Simulator.Run(signal_check_frequency = -1)
ns.core.Simulator.Destroy()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
doduytrung/odoo-8.0
|
refs/heads/master
|
openerp/addons/test_access_rights/models.py
|
299
|
from openerp import fields, models
class SomeObj(models.Model):
_name = 'test_access_right.some_obj'
val = fields.Integer()
|
msebire/intellij-community
|
refs/heads/master
|
python/testData/refactoring/makeFunctionTopLevel/recursiveMethod.after.py
|
44
|
class C:
def __init__(self, foo):
self.foo = foo
def method(foo1, foo):
method(foo1, foo1)
method(C(1).foo, 2)
|
xzYue/odoo
|
refs/heads/8.0
|
openerp/service/model.py
|
62
|
# -*- coding: utf-8 -*-
from functools import wraps
import logging
from psycopg2 import IntegrityError, OperationalError, errorcodes
import random
import threading
import time
import openerp
from openerp.tools.translate import translate
from openerp.osv.orm import except_orm
from contextlib import contextmanager
import security
_logger = logging.getLogger(__name__)
PG_CONCURRENCY_ERRORS_TO_RETRY = (errorcodes.LOCK_NOT_AVAILABLE, errorcodes.SERIALIZATION_FAILURE, errorcodes.DEADLOCK_DETECTED)
MAX_TRIES_ON_CONCURRENCY_FAILURE = 5
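# Concurrency errors are retried with randomized exponential backoff (see
# check() below): retry n sleeps a random duration in [0, 2**n) seconds, so the
# worst-case cumulative wait over the 5 allowed retries is
# 2**0 + 2**1 + ... + 2**4 = 31 seconds.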
def dispatch(method, params):
(db, uid, passwd ) = params[0:3]
# set uid tracker - cleaned up at the WSGI
# dispatching phase in openerp.service.wsgi_server.application
threading.current_thread().uid = uid
params = params[3:]
if method == 'obj_list':
raise NameError("obj_list has been discontinued via RPC as of 6.0, please query ir.model directly!")
if method not in ['execute', 'execute_kw', 'exec_workflow']:
raise NameError("Method not available %s" % method)
security.check(db,uid,passwd)
openerp.modules.registry.RegistryManager.check_registry_signaling(db)
fn = globals()[method]
res = fn(db, uid, *params)
openerp.modules.registry.RegistryManager.signal_caches_change(db)
return res
def check(f):
@wraps(f)
def wrapper(___dbname, *args, **kwargs):
""" Wraps around OSV functions and normalises a few exceptions
"""
        dbname = ___dbname  # NOTE: this forbids using "___dbname" as an argument name in http routes
def tr(src, ttype):
# We try to do the same as the _(), but without the frame
            # inspection, since we already are wrapping an osv function
# trans_obj = self.get('ir.translation') cannot work yet :(
ctx = {}
if not kwargs:
if args and isinstance(args[-1], dict):
ctx = args[-1]
elif isinstance(kwargs, dict):
if 'context' in kwargs:
ctx = kwargs['context']
elif 'kwargs' in kwargs:
# http entry points such as call_kw()
ctx = kwargs['kwargs'].get('context')
uid = 1
if args and isinstance(args[0], (long, int)):
uid = args[0]
lang = ctx and ctx.get('lang')
if not (lang or hasattr(src, '__call__')):
return src
# We open a *new* cursor here, one reason is that failed SQL
# queries (as in IntegrityError) will invalidate the current one.
cr = False
if hasattr(src, '__call__'):
# callable. We need to find the right parameters to call
# the orm._sql_message(self, cr, uid, ids, context) function,
# or we skip..
# our signature is f(registry, dbname [,uid, obj, method, args])
try:
if args and len(args) > 1:
                        # TODO self doesn't exist, but was already wrong before (it was not a registry but just the object_service).
obj = self.get(args[1])
if len(args) > 3 and isinstance(args[3], (long, int, list)):
ids = args[3]
else:
ids = []
cr = openerp.sql_db.db_connect(dbname).cursor()
return src(obj, cr, uid, ids, context=(ctx or {}))
except Exception:
pass
finally:
if cr: cr.close()
                return False # so that the original SQL error will
                             # be returned; it is the best we have.
try:
cr = openerp.sql_db.db_connect(dbname).cursor()
res = translate(cr, name=False, source_type=ttype,
lang=lang, source=src)
if res:
return res
else:
return src
finally:
if cr: cr.close()
def _(src):
return tr(src, 'code')
tries = 0
while True:
try:
if openerp.registry(dbname)._init and not openerp.tools.config['test_enable']:
raise openerp.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
return f(dbname, *args, **kwargs)
except OperationalError, e:
# Automatically retry the typical transaction serialization errors
if e.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
raise
if tries >= MAX_TRIES_ON_CONCURRENCY_FAILURE:
_logger.warning("%s, maximum number of tries reached" % errorcodes.lookup(e.pgcode))
raise
wait_time = random.uniform(0.0, 2 ** tries)
tries += 1
_logger.info("%s, retry %d/%d in %.04f sec..." % (errorcodes.lookup(e.pgcode), tries, MAX_TRIES_ON_CONCURRENCY_FAILURE, wait_time))
time.sleep(wait_time)
except IntegrityError, inst:
registry = openerp.registry(dbname)
for key in registry._sql_error.keys():
if key in inst[0]:
raise openerp.osv.orm.except_orm(_('Constraint Error'), tr(registry._sql_error[key], 'sql_constraint') or inst[0])
if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
msg = _('The operation cannot be completed, probably due to the following:\n- deletion: you may be trying to delete a record while other records still reference it\n- creation/update: a mandatory field is not correctly set')
_logger.debug("IntegrityError", exc_info=True)
try:
errortxt = inst.pgerror.replace('«','"').replace('»','"')
if '"public".' in errortxt:
context = errortxt.split('"public".')[1]
model_name = table = context.split('"')[1]
else:
last_quote_end = errortxt.rfind('"')
last_quote_begin = errortxt.rfind('"', 0, last_quote_end)
model_name = table = errortxt[last_quote_begin+1:last_quote_end].strip()
model = table.replace("_",".")
if model in registry:
model_obj = registry[model]
model_name = model_obj._description or model_obj._name
msg += _('\n\n[object with reference: %s - %s]') % (model_name, model)
except Exception:
pass
raise openerp.osv.orm.except_orm(_('Integrity Error'), msg)
else:
raise openerp.osv.orm.except_orm(_('Integrity Error'), inst[0])
return wrapper
def execute_cr(cr, uid, obj, method, *args, **kw):
object = openerp.registry(cr.dbname).get(obj)
if object is None:
raise except_orm('Object Error', "Object %s doesn't exist" % obj)
return getattr(object, method)(cr, uid, *args, **kw)
def execute_kw(db, uid, obj, method, args, kw=None):
return execute(db, uid, obj, method, *args, **kw or {})
@check
def execute(db, uid, obj, method, *args, **kw):
threading.currentThread().dbname = db
with openerp.registry(db).cursor() as cr:
if method.startswith('_'):
raise except_orm('Access Denied', 'Private methods (such as %s) cannot be called remotely.' % (method,))
res = execute_cr(cr, uid, obj, method, *args, **kw)
if res is None:
_logger.warning('The method %s of the object %s can not return `None` !', method, obj)
return res
def exec_workflow_cr(cr, uid, obj, signal, *args):
res_id = args[0]
return execute_cr(cr, uid, obj, 'signal_workflow', [res_id], signal)[res_id]
@check
def exec_workflow(db, uid, obj, signal, *args):
with openerp.registry(db).cursor() as cr:
return exec_workflow_cr(cr, uid, obj, signal, *args)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
manti-by/Telonius
|
refs/heads/master
|
app/bearing/migrations/0025_bearingiso_value.py
|
2
|
import logging
from django.db import migrations, models
logger = logging.getLogger()
def update_value(apps, schema_editor):
BearingISO = apps.get_model('bearing', 'BearingISO')
for item in BearingISO.objects.all():
try:
item.value = item.name
item.save()
except Exception as e:
logger.error(e)
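# Note: RunPython below is given no reverse function, so reversing this
# migration past the data step raises IrreversibleError.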
class Migration(migrations.Migration):
dependencies = [
('bearing', '0024_auto_20170726_1028'),
]
operations = [
migrations.AddField(
model_name='bearingiso',
name='value',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Value'),
),
migrations.RunPython(update_value)
]
|
nozuono/calibre-webserver
|
refs/heads/master
|
src/calibre/gui2/store/config/search/search_widget.py
|
9
|
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, division, absolute_import, print_function)
__license__ = 'GPL 3'
__copyright__ = '2011, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
from PyQt4.Qt import QWidget
from calibre.gui2 import JSONConfig
from calibre.gui2.store.config.search.search_widget_ui import Ui_Form
class StoreConfigWidget(QWidget, Ui_Form):
def __init__(self, config=None):
QWidget.__init__(self)
self.setupUi(self)
self.config = JSONConfig('store/search') if not config else config
# These default values should be the same as in
# calibre.gui2.store.search.search:SearchDialog.load_settings
# Seconds
self.opt_timeout.setValue(self.config.get('timeout', 75))
self.opt_hang_time.setValue(self.config.get('hang_time', 75))
self.opt_max_results.setValue(self.config.get('max_results', 10))
self.opt_open_external.setChecked(self.config.get('open_external', True))
# Number of threads to run for each type of operation
self.opt_search_thread_count.setValue(self.config.get('search_thread_count', 4))
self.opt_cache_thread_count.setValue(self.config.get('cache_thread_count', 2))
self.opt_cover_thread_count.setValue(self.config.get('cover_thread_count', 2))
self.opt_details_thread_count.setValue(self.config.get('details_thread_count', 4))
def save_settings(self):
self.config['timeout'] = self.opt_timeout.value()
self.config['hang_time'] = self.opt_hang_time.value()
self.config['max_results'] = self.opt_max_results.value()
self.config['open_external'] = self.opt_open_external.isChecked()
self.config['search_thread_count'] = self.opt_search_thread_count.value()
self.config['cache_thread_count'] = self.opt_cache_thread_count.value()
self.config['cover_thread_count'] = self.opt_cover_thread_count.value()
self.config['details_thread_count'] = self.opt_details_thread_count.value()
|
TuSimple/mxnet
|
refs/heads/master
|
example/rcnn/test.py
|
41
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import mxnet as mx
from rcnn.logger import logger
from rcnn.config import config, default, generate_config
from rcnn.tools.test_rcnn import test_rcnn
def parse_args():
parser = argparse.ArgumentParser(description='Test a Faster R-CNN network')
# general
parser.add_argument('--network', help='network name', default=default.network, type=str)
parser.add_argument('--dataset', help='dataset name', default=default.dataset, type=str)
args, rest = parser.parse_known_args()
generate_config(args.network, args.dataset)
parser.add_argument('--image_set', help='image_set name', default=default.test_image_set, type=str)
parser.add_argument('--root_path', help='output data folder', default=default.root_path, type=str)
parser.add_argument('--dataset_path', help='dataset path', default=default.dataset_path, type=str)
# testing
parser.add_argument('--prefix', help='model to test with', default=default.e2e_prefix, type=str)
parser.add_argument('--epoch', help='model to test with', default=default.e2e_epoch, type=int)
parser.add_argument('--gpu', help='GPU device to test with', default=0, type=int)
# rcnn
parser.add_argument('--vis', help='turn on visualization', action='store_true')
parser.add_argument('--thresh', help='valid detection threshold', default=1e-3, type=float)
parser.add_argument('--shuffle', help='shuffle data on visualization', action='store_true')
parser.add_argument('--has_rpn', help='generate proposals on the fly', action='store_true', default=True)
parser.add_argument('--proposal', help='can be ss for selective search or rpn', default='rpn', type=str)
args = parser.parse_args()
return args
def main():
args = parse_args()
logger.info('Called with argument: %s' % args)
ctx = mx.gpu(args.gpu)
test_rcnn(args.network, args.dataset, args.image_set, args.root_path, args.dataset_path,
ctx, args.prefix, args.epoch,
args.vis, args.shuffle, args.has_rpn, args.proposal, args.thresh)
if __name__ == '__main__':
main()
|
LUTAN/tensorflow
|
refs/heads/master
|
tensorflow/tools/test/system_info.py
|
170
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library for getting system information during TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import app
from tensorflow.tools.test import system_info_lib
def main(unused_args):
config = system_info_lib.gather_machine_configuration()
print(config)
if __name__ == "__main__":
app.run()
|
ostree/plaso
|
refs/heads/master
|
tests/filters/test_lib.py
|
2
|
# -*- coding: utf-8 -*-
import unittest
from plaso.filters import interface
class TestEventFilter(interface.FilterObject):
"""Class to define a filter for a test event."""
def CompileFilter(self, unused_filter_expression):
"""Compiles the filter expression.
Args:
      unused_filter_expression: string that contains the filter expression.
Raises:
WrongPlugin: if the filter could not be compiled.
"""
pass
class FilterTestCase(unittest.TestCase):
"""The unit test case for an event filter."""
|
AlexPereverzyev/spidy
|
refs/heads/master
|
spidy/language/log_node.py
|
1
|
''' 'log' statement parsing and evaluation. '''
import syntax
import exp_parser
from spidy.common import *
from nodes import Node
class LogNode(Node):
'''
Logs string to file or *stdout*, ignores empty strings.
Example::
log 'loading next page'
.. note:: The statement logs messages as *INFO*.
'''
_string = None
def get_string(self):
return self._string
def set_string(self, string):
self._string = string
def evaluate(self):
log.debug(self._id, 'LogNode: evaluating')
s = self._string.evaluate()
        if s is not None and isinstance(s, basestring) and len(s) > 0:
log.info(self._id, s)
else:
log.warning(self._id, 'LogNode: attempt to write empty message to log, line {0}'.format(self._sline.number+1))
def parse(self, line_num):
log.debug(self._id, 'LogNode: parsing')
self._sline = self._context.get_script()[line_num]
line = self._sline.string
idx = line.index(syntax.OP_LOG) + len(syntax.OP_LOG)
self._string = exp_parser.parse_expression(self._context, line_num, line[idx:])
def __str__(self):
return syntax.OP_LOG + syntax.WHITESPACE + str(self._string)
|
marclaporte/clearskies_core
|
refs/heads/master
|
tools/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
|
243
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that targets have independent INTERMEDIATE_DIRs.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('test.gyp', chdir='src')
test.build('test.gyp', 'target1', chdir='src')
# Check stuff exists.
intermediate_file1 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
shared_intermediate_file1 = test.read('src/shared_outfile.txt')
test.must_contain(shared_intermediate_file1, 'shared_target1')
test.run_gyp('test2.gyp', chdir='src')
# Force the shared intermediate to be rebuilt.
test.sleep()
test.touch('src/shared_infile.txt')
test.build('test2.gyp', 'target2', chdir='src')
# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
# file did.
intermediate_file2 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
test.must_contain(intermediate_file2, 'target2')
shared_intermediate_file2 = test.read('src/shared_outfile.txt')
if shared_intermediate_file1 != shared_intermediate_file2:
test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
test.must_contain(shared_intermediate_file1, 'shared_target2')
test.must_contain(shared_intermediate_file2, 'shared_target2')
test.pass_test()
|
square/pants
|
refs/heads/master
|
src/python/pants/backend/jvm/tasks/jvm_tool_task_mixin.py
|
2
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from pants.backend.jvm.jvm_tool_bootstrapper import JvmToolBootstrapper
class JvmToolTaskMixin(object):
_jvm_tool_bootstrapper = None
@property
def jvm_tool_bootstrapper(self):
if self._jvm_tool_bootstrapper is None:
self._jvm_tool_bootstrapper = JvmToolBootstrapper(self.context.products)
return self._jvm_tool_bootstrapper
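  # The bootstrapper above is created lazily on first access and then cached
  # on the instance.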
def register_jvm_tool(self, key, target_addrs, ini_section=None, ini_key=None):
self.jvm_tool_bootstrapper.register_jvm_tool(key, target_addrs,
ini_section=ini_section, ini_key=ini_key)
def register_jvm_tool_from_config(self, key, config, ini_section, ini_key, default):
self.jvm_tool_bootstrapper.register_jvm_tool_from_config(key, config,
ini_section=ini_section,
ini_key=ini_key,
default=default)
def tool_classpath(self, key, executor=None):
return self.jvm_tool_bootstrapper.get_jvm_tool_classpath(key, executor)
def lazy_tool_classpath(self, key, executor=None):
return self.jvm_tool_bootstrapper.get_lazy_jvm_tool_classpath(key, executor)
|
ntymtsiv/CloudFerry
|
refs/heads/master
|
cloudferrylib/os/actions/convert_image_to_file.py
|
5
|
from fabric.api import run, settings, env
from cloudferrylib.base.action import action
from cloudferrylib.utils import forward_agent
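# ConvertImageToFile streams a glance image into a file on the remote host by
# running "glance image-download" over fabric with shell redirection.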
class ConvertImageToFile(action.Action):
def run(self, image_id=None, base_filename=None, **kwargs):
cfg = self.cloud.cloud_config.cloud
with settings(host_string=cfg.host):
with forward_agent(env.key_filename):
run(("glance --os-username=%s --os-password=%s --os-tenant-name=%s " +
"--os-auth-url=%s " +
"image-download %s > %s") %
(cfg.user,
cfg.password,
cfg.tenant,
cfg.auth_url,
image_id,
base_filename))
|
gopal1cloud/neutron
|
refs/heads/master
|
neutron/plugins/vmware/dhcpmeta_modes.py
|
5
|
# Copyright 2013 VMware, Inc.
#
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo.config import cfg
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.common import constants as const
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.db import agents_db
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.common import config
from neutron.plugins.vmware.common import exceptions as nsx_exc
from neutron.plugins.vmware.dhcp_meta import combined
from neutron.plugins.vmware.dhcp_meta import lsnmanager
from neutron.plugins.vmware.dhcp_meta import migration
from neutron.plugins.vmware.dhcp_meta import nsx as nsx_svc
from neutron.plugins.vmware.dhcp_meta import rpc as nsx_rpc
from neutron.plugins.vmware.extensions import lsn
LOG = logging.getLogger(__name__)
class DhcpMetadataAccess(object):
def setup_dhcpmeta_access(self):
"""Initialize support for DHCP and Metadata services."""
self._init_extensions()
if cfg.CONF.NSX.agent_mode == config.AgentModes.AGENT:
self._setup_rpc_dhcp_metadata()
mod = nsx_rpc
elif cfg.CONF.NSX.agent_mode == config.AgentModes.AGENTLESS:
self._setup_nsx_dhcp_metadata()
mod = nsx_svc
elif cfg.CONF.NSX.agent_mode == config.AgentModes.COMBINED:
notifier = self._setup_nsx_dhcp_metadata()
self._setup_rpc_dhcp_metadata(notifier=notifier)
mod = combined
else:
error = _("Invalid agent_mode: %s") % cfg.CONF.NSX.agent_mode
LOG.error(error)
raise nsx_exc.NsxPluginException(err_msg=error)
self.handle_network_dhcp_access_delegate = (
mod.handle_network_dhcp_access
)
self.handle_port_dhcp_access_delegate = (
mod.handle_port_dhcp_access
)
self.handle_port_metadata_access_delegate = (
mod.handle_port_metadata_access
)
self.handle_metadata_access_delegate = (
mod.handle_router_metadata_access
)
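        # All four delegates above come from the module selected by
        # cfg.CONF.NSX.agent_mode: nsx_rpc (agent), nsx_svc (agentless) or
        # combined.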
def _setup_rpc_dhcp_metadata(self, notifier=None):
self.topic = topics.PLUGIN
self.conn = n_rpc.create_connection(new=True)
self.endpoints = [nsx_rpc.NSXRpcCallbacks(),
agents_db.AgentExtRpcCallback()]
self.conn.create_consumer(self.topic, self.endpoints, fanout=False)
self.agent_notifiers[const.AGENT_TYPE_DHCP] = (
notifier or dhcp_rpc_agent_api.DhcpAgentNotifyAPI())
self.conn.consume_in_threads()
self.network_scheduler = importutils.import_object(
cfg.CONF.network_scheduler_driver
)
self.supported_extension_aliases.extend(
['agent', 'dhcp_agent_scheduler'])
def _setup_nsx_dhcp_metadata(self):
self._check_services_requirements()
nsx_svc.register_dhcp_opts(cfg)
nsx_svc.register_metadata_opts(cfg)
lsnmanager.register_lsn_opts(cfg)
lsn_manager = lsnmanager.PersistentLsnManager(self.safe_reference)
self.lsn_manager = lsn_manager
if cfg.CONF.NSX.agent_mode == config.AgentModes.AGENTLESS:
notifier = nsx_svc.DhcpAgentNotifyAPI(self.safe_reference,
lsn_manager)
self.agent_notifiers[const.AGENT_TYPE_DHCP] = notifier
# In agentless mode, ports whose owner is DHCP need to
            # be special cased, so add DHCP to the list of special
            # owners
if const.DEVICE_OWNER_DHCP not in self.port_special_owners:
self.port_special_owners.append(const.DEVICE_OWNER_DHCP)
elif cfg.CONF.NSX.agent_mode == config.AgentModes.COMBINED:
            # This becomes ineffective, as all new network creations
# are handled by Logical Services Nodes in NSX
cfg.CONF.set_override('network_auto_schedule', False)
LOG.warn(_('network_auto_schedule has been disabled'))
notifier = combined.DhcpAgentNotifyAPI(self.safe_reference,
lsn_manager)
self.supported_extension_aliases.append(lsn.EXT_ALIAS)
# Add the capability to migrate dhcp and metadata services over
self.migration_manager = (
migration.MigrationManager(
self.safe_reference, lsn_manager, notifier))
return notifier
def _init_extensions(self):
extensions = (lsn.EXT_ALIAS, 'agent', 'dhcp_agent_scheduler')
for ext in extensions:
if ext in self.supported_extension_aliases:
self.supported_extension_aliases.remove(ext)
def _check_services_requirements(self):
try:
error = None
nsx_svc.check_services_requirements(self.cluster)
except nsx_exc.InvalidVersion:
error = _("Unable to run Neutron with config option '%s', as NSX "
"does not support it") % cfg.CONF.NSX.agent_mode
except nsx_exc.ServiceClusterUnavailable:
error = _("Unmet dependency for config option "
"'%s'") % cfg.CONF.NSX.agent_mode
if error:
LOG.exception(error)
raise nsx_exc.NsxPluginException(err_msg=error)
def get_lsn(self, context, network_id, fields=None):
report = self.migration_manager.report(context, network_id)
return {'network': network_id, 'report': report}
def create_lsn(self, context, lsn):
network_id = lsn['lsn']['network']
subnet = self.migration_manager.validate(context, network_id)
subnet_id = None if not subnet else subnet['id']
self.migration_manager.migrate(context, network_id, subnet)
r = self.migration_manager.report(context, network_id, subnet_id)
return {'network': network_id, 'report': r}
def handle_network_dhcp_access(self, context, network, action):
self.handle_network_dhcp_access_delegate(self.safe_reference, context,
network, action)
def handle_port_dhcp_access(self, context, port_data, action):
self.handle_port_dhcp_access_delegate(self.safe_reference, context,
port_data, action)
def handle_port_metadata_access(self, context, port, is_delete=False):
self.handle_port_metadata_access_delegate(self.safe_reference, context,
port, is_delete)
def handle_router_metadata_access(self, context,
router_id, interface=None):
self.handle_metadata_access_delegate(self.safe_reference, context,
router_id, interface)
|
ds-hwang/chromium-crosswalk
|
refs/heads/master
|
third_party/closure_linter/closure_linter/not_strict_test.py
|
129
|
#!/usr/bin/env python
#
# Copyright 2011 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for gjslint --nostrict.
Tests errors that can be thrown by gjslint when not in strict mode.
"""
import os
import sys
import unittest
import gflags as flags
import unittest as googletest
from closure_linter import errors
from closure_linter import runner
from closure_linter.common import filetestcase
_RESOURCE_PREFIX = 'closure_linter/testdata'
flags.FLAGS.strict = False
flags.FLAGS.custom_jsdoc_tags = ('customtag', 'requires')
flags.FLAGS.closurized_namespaces = ('goog', 'dummy')
flags.FLAGS.limited_doc_files = ('externs.js', 'dummy.js',
'limited_doc_checks.js')
# List of files under testdata to test.
# We need to list files explicitly since pyglib can't list directories.
_TEST_FILES = [
'not_strict.js'
]
class GJsLintTestSuite(unittest.TestSuite):
"""Test suite to run a GJsLintTest for each of several files.
If sys.argv[1:] is non-empty, it is interpreted as a list of filenames in
testdata to test. Otherwise, _TEST_FILES is used.
"""
def __init__(self, tests=()):
unittest.TestSuite.__init__(self, tests)
argv = sys.argv and sys.argv[1:] or []
if argv:
test_files = argv
else:
test_files = _TEST_FILES
for test_file in test_files:
resource_path = os.path.join(_RESOURCE_PREFIX, test_file)
self.addTest(filetestcase.AnnotatedFileTestCase(resource_path,
runner.Run,
errors.ByName))
if __name__ == '__main__':
# Don't let main parse args; it happens in the TestSuite.
googletest.main(argv=sys.argv[0:1], defaultTest='GJsLintTestSuite')
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/vera/climate.py
|
7
|
"""Support for Vera thermostats."""
from typing import Any, Callable, List, Optional
import pyvera as veraApi
from homeassistant.components.climate import (
DOMAIN as PLATFORM_DOMAIN,
ENTITY_ID_FORMAT,
ClimateEntity,
)
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_ON,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.util import convert
from . import VeraDevice
from .common import ControllerData, get_controller_data
FAN_OPERATION_LIST = [FAN_ON, FAN_AUTO]
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
SUPPORT_HVAC = [HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF]
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
controller_data = get_controller_data(hass, entry)
async_add_entities(
[
VeraThermostat(device, controller_data)
for device in controller_data.devices.get(PLATFORM_DOMAIN)
]
)
class VeraThermostat(VeraDevice[veraApi.VeraThermostat], ClimateEntity):
"""Representation of a Vera Thermostat."""
def __init__(
self, vera_device: veraApi.VeraThermostat, controller_data: ControllerData
):
"""Initialize the Vera device."""
VeraDevice.__init__(self, vera_device, controller_data)
self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
@property
def supported_features(self) -> Optional[int]:
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
mode = self.vera_device.get_hvac_mode()
if mode == "HeatOn":
return HVAC_MODE_HEAT
if mode == "CoolOn":
return HVAC_MODE_COOL
if mode == "AutoChangeOver":
return HVAC_MODE_HEAT_COOL
return HVAC_MODE_OFF
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return SUPPORT_HVAC
@property
def fan_mode(self) -> Optional[str]:
"""Return the fan setting."""
mode = self.vera_device.get_fan_mode()
if mode == "ContinuousOn":
return FAN_ON
return FAN_AUTO
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return a list of available fan modes."""
return FAN_OPERATION_LIST
def set_fan_mode(self, fan_mode) -> None:
"""Set new target temperature."""
if fan_mode == FAN_ON:
self.vera_device.fan_on()
else:
self.vera_device.fan_auto()
self.schedule_update_ha_state()
@property
def current_power_w(self) -> Optional[float]:
"""Return the current power usage in W."""
power = self.vera_device.power
if power:
return convert(power, float, 0.0)
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
vera_temp_units = self.vera_device.vera_controller.temperature_units
if vera_temp_units == "F":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return self.vera_device.get_current_temperature()
@property
def operation(self) -> str:
"""Return current operation ie. heat, cool, idle."""
return self.vera_device.get_hvac_mode()
@property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
return self.vera_device.get_current_goal_temperature()
def set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperatures."""
if kwargs.get(ATTR_TEMPERATURE) is not None:
self.vera_device.set_temperature(kwargs.get(ATTR_TEMPERATURE))
self.schedule_update_ha_state()
def set_hvac_mode(self, hvac_mode) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_OFF:
self.vera_device.turn_off()
elif hvac_mode == HVAC_MODE_HEAT_COOL:
self.vera_device.turn_auto_on()
elif hvac_mode == HVAC_MODE_COOL:
self.vera_device.turn_cool_on()
elif hvac_mode == HVAC_MODE_HEAT:
self.vera_device.turn_heat_on()
self.schedule_update_ha_state()
|
doismellburning/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/content/__init__.py
|
118
|
"""
Setup the signals on startup.
"""
import openedx.core.djangoapps.content.course_structures.signals
|
facebook/fbthrift
|
refs/heads/master
|
thrift/lib/py/server/__init__.py
|
1
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__all__ = ['TServer', 'TAsyncioServer']
|
ahamilton55/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/ec2_asg_facts.py
|
44
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_asg_facts
short_description: Gather facts about ec2 Auto Scaling Groups (ASGs) in AWS
description:
- Gather facts about ec2 Auto Scaling Groups (ASGs) in AWS
version_added: "2.2"
author: "Rob White (@wimnat)"
options:
name:
description:
- The prefix or name of the auto scaling group(s) you are searching for.
- "Note: This is a regular expression match with implicit '^' (beginning of string). Append '$' for a complete name match."
required: false
tags:
description:
- >
A dictionary/hash of tags in the format { tag1_name: 'tag1_value', tag2_name: 'tag2_value' } to match against the auto scaling
group(s) you are searching for.
required: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Find all groups
- ec2_asg_facts:
register: asgs
# Find a group with matching name/prefix
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
# Find a group with matching tags
- ec2_asg_facts:
tags:
project: webapp
env: production
register: asgs
# Find a group with matching name/prefix and tags
- ec2_asg_facts:
name: myproject
tags:
env: production
register: asgs
# Fail if no groups are found
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
failed_when: "{{ asgs.results | length == 0 }}"
# Fail if more than 1 group is found
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
failed_when: "{{ asgs.results | length > 1 }}"
'''
RETURN = '''
---
auto_scaling_group_arn:
description: The Amazon Resource Name of the ASG
returned: success
    type: str
sample: "arn:aws:autoscaling:us-west-2:1234567890:autoScalingGroup:10787c52-0bcb-427d-82ba-c8e4b008ed2e:autoScalingGroupName/public-webapp-production-1"
auto_scaling_group_name:
description: Name of autoscaling group
returned: success
type: str
sample: "public-webapp-production-1"
availability_zones:
description: List of Availability Zones that are enabled for this ASG.
returned: success
type: list
sample: ["us-west-2a", "us-west-2b", "us-west-2a"]
created_time:
description: The date and time this ASG was created, in ISO 8601 format.
returned: success
    type: str
sample: "2015-11-25T00:05:36.309Z"
default_cooldown:
description: The default cooldown time in seconds.
returned: success
type: int
sample: 300
desired_capacity:
description: The number of EC2 instances that should be running in this group.
returned: success
type: int
sample: 3
health_check_period:
description: Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
returned: success
type: int
sample: 30
health_check_type:
description: The service you want the health status from, one of "EC2" or "ELB".
returned: success
type: str
sample: "ELB"
instances:
description: List of EC2 instances and their status as it relates to the ASG.
returned: success
type: list
sample: [
{
"availability_zone": "us-west-2a",
"health_status": "Healthy",
"instance_id": "i-es22ad25",
"launch_configuration_name": "public-webapp-production-1",
"lifecycle_state": "InService",
"protected_from_scale_in": "false"
}
]
launch_configuration_name:
description: Name of launch configuration associated with the ASG.
returned: success
type: str
sample: "public-webapp-production-1"
load_balancer_names:
    description: List of load balancer names attached to the ASG.
returned: success
type: list
sample: ["elb-webapp-prod"]
max_size:
description: Maximum size of group
returned: success
type: int
sample: 3
min_size:
description: Minimum size of group
returned: success
type: int
sample: 1
new_instances_protected_from_scale_in:
    description: Whether or not new instances are protected from automatic scale-in.
    returned: success
    type: bool
sample: "false"
placement_group:
description: Placement group into which instances are launched, if any.
returned: success
type: str
sample: None
status:
description: The current state of the group when DeleteAutoScalingGroup is in progress.
returned: success
type: str
sample: None
tags:
description: List of tags for the ASG, and whether or not each tag propagates to instances at launch.
returned: success
type: list
sample: [
{
"key": "Name",
"value": "public-webapp-production-1",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
},
{
"key": "env",
"value": "production",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
}
]
termination_policies:
description: A list of termination policies for the group.
returned: success
    type: list
sample: ["Default"]
'''
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def match_asg_tags(tags_to_match, asg):
for key, value in tags_to_match.items():
for tag in asg['Tags']:
if key == tag['Key'] and value == tag['Value']:
break
else:
return False
return True
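# Illustrative example (not part of the module): every requested tag must be
# present on the ASG with an exact value match for the group to qualify.
#
#     asg = {'Tags': [{'Key': 'env', 'Value': 'production'}]}
#     match_asg_tags({'env': 'production'}, asg)  # -> True
#     match_asg_tags({'env': 'staging'}, asg)     # -> False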
def find_asgs(conn, module, name=None, tags=None):
"""
    Args:
        conn (boto3.AutoScaling.Client): Valid Boto3 ASG client.
        module (AnsibleModule): Initialized Ansible module, used to fail the
            run on API errors.
        name (str): Optional name prefix of the ASG you are looking for.
        tags (dict): Optional dictionary of tags and values to search for.
    Basic Usage:
        >>> name = 'public-webapp-production'
        >>> tags = { 'env': 'production' }
        >>> conn = boto3.client('autoscaling', region_name='us-west-2')
        >>> results = find_asgs(conn, module, name=name, tags=tags)
Returns:
List
[
{
"auto_scaling_group_arn": (
"arn:aws:autoscaling:us-west-2:275977225706:autoScalingGroup:58abc686-9783-4528-b338-3ad6f1cbbbaf:"
"autoScalingGroupName/public-webapp-production"
),
"auto_scaling_group_name": "public-webapp-production",
"availability_zones": ["us-west-2c", "us-west-2b", "us-west-2a"],
"created_time": "2016-02-02T23:28:42.481000+00:00",
"default_cooldown": 300,
"desired_capacity": 2,
"enabled_metrics": [],
"health_check_grace_period": 300,
"health_check_type": "ELB",
"instances":
[
{
"availability_zone": "us-west-2c",
"health_status": "Healthy",
"instance_id": "i-047a12cb",
"launch_configuration_name": "public-webapp-production-1",
"lifecycle_state": "InService",
"protected_from_scale_in": false
},
{
"availability_zone": "us-west-2a",
"health_status": "Healthy",
"instance_id": "i-7a29df2c",
"launch_configuration_name": "public-webapp-production-1",
"lifecycle_state": "InService",
"protected_from_scale_in": false
}
],
"launch_configuration_name": "public-webapp-production-1",
"load_balancer_names": ["public-webapp-production-lb"],
"max_size": 4,
"min_size": 2,
"new_instances_protected_from_scale_in": false,
"placement_group": None,
"status": None,
"suspended_processes": [],
"tags":
[
{
"key": "Name",
"propagate_at_launch": true,
"resource_id": "public-webapp-production",
"resource_type": "auto-scaling-group",
"value": "public-webapp-production"
},
{
"key": "env",
"propagate_at_launch": true,
"resource_id": "public-webapp-production",
"resource_type": "auto-scaling-group",
"value": "production"
}
],
"termination_policies":
[
"Default"
],
"vpc_zone_identifier":
[
"subnet-a1b1c1d1",
"subnet-a2b2c2d2",
"subnet-a3b3c3d3"
]
}
]
"""
try:
asgs_paginator = conn.get_paginator('describe_auto_scaling_groups')
asgs = asgs_paginator.paginate().build_full_result()
except ClientError as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
matched_asgs = []
if name is not None:
        # compile the name prefix regex only when the user specified a name
name_prog = re.compile(r'^' + name)
for asg in asgs['AutoScalingGroups']:
if name:
matched_name = name_prog.search(asg['AutoScalingGroupName'])
else:
matched_name = True
if tags:
matched_tags = match_asg_tags(tags, asg)
else:
matched_tags = True
if matched_name and matched_tags:
matched_asgs.append(camel_dict_to_snake_dict(asg))
return matched_asgs
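# Name-matching note (derived from the DOCUMENTATION above): the pattern is
# anchored at the start only, so name='public' matches 'public-webserver-asg'
# but not 'my-public-asg'; append '$' (e.g. 'public-webserver-asg$') for an
# exact name match.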
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(type='str'),
tags=dict(type='dict'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
asg_name = module.params.get('name')
asg_tags = module.params.get('tags')
try:
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
autoscaling = boto3_conn(module, conn_type='client', resource='autoscaling', region=region, endpoint=ec2_url, **aws_connect_kwargs)
except ClientError as e:
module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
results = find_asgs(autoscaling, module, name=asg_name, tags=asg_tags)
module.exit_json(results=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
bottompawn/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/curses/has_key.py
|
195
|
#
# Emulation of has_key() function for platforms that don't use ncurses
#
import _curses
# Table mapping curses keys to the terminfo capability name
_capability_names = {
_curses.KEY_A1: 'ka1',
_curses.KEY_A3: 'ka3',
_curses.KEY_B2: 'kb2',
_curses.KEY_BACKSPACE: 'kbs',
_curses.KEY_BEG: 'kbeg',
_curses.KEY_BTAB: 'kcbt',
_curses.KEY_C1: 'kc1',
_curses.KEY_C3: 'kc3',
_curses.KEY_CANCEL: 'kcan',
_curses.KEY_CATAB: 'ktbc',
_curses.KEY_CLEAR: 'kclr',
_curses.KEY_CLOSE: 'kclo',
_curses.KEY_COMMAND: 'kcmd',
_curses.KEY_COPY: 'kcpy',
_curses.KEY_CREATE: 'kcrt',
_curses.KEY_CTAB: 'kctab',
_curses.KEY_DC: 'kdch1',
_curses.KEY_DL: 'kdl1',
_curses.KEY_DOWN: 'kcud1',
_curses.KEY_EIC: 'krmir',
_curses.KEY_END: 'kend',
_curses.KEY_ENTER: 'kent',
_curses.KEY_EOL: 'kel',
_curses.KEY_EOS: 'ked',
_curses.KEY_EXIT: 'kext',
_curses.KEY_F0: 'kf0',
_curses.KEY_F1: 'kf1',
_curses.KEY_F10: 'kf10',
_curses.KEY_F11: 'kf11',
_curses.KEY_F12: 'kf12',
_curses.KEY_F13: 'kf13',
_curses.KEY_F14: 'kf14',
_curses.KEY_F15: 'kf15',
_curses.KEY_F16: 'kf16',
_curses.KEY_F17: 'kf17',
_curses.KEY_F18: 'kf18',
_curses.KEY_F19: 'kf19',
_curses.KEY_F2: 'kf2',
_curses.KEY_F20: 'kf20',
_curses.KEY_F21: 'kf21',
_curses.KEY_F22: 'kf22',
_curses.KEY_F23: 'kf23',
_curses.KEY_F24: 'kf24',
_curses.KEY_F25: 'kf25',
_curses.KEY_F26: 'kf26',
_curses.KEY_F27: 'kf27',
_curses.KEY_F28: 'kf28',
_curses.KEY_F29: 'kf29',
_curses.KEY_F3: 'kf3',
_curses.KEY_F30: 'kf30',
_curses.KEY_F31: 'kf31',
_curses.KEY_F32: 'kf32',
_curses.KEY_F33: 'kf33',
_curses.KEY_F34: 'kf34',
_curses.KEY_F35: 'kf35',
_curses.KEY_F36: 'kf36',
_curses.KEY_F37: 'kf37',
_curses.KEY_F38: 'kf38',
_curses.KEY_F39: 'kf39',
_curses.KEY_F4: 'kf4',
_curses.KEY_F40: 'kf40',
_curses.KEY_F41: 'kf41',
_curses.KEY_F42: 'kf42',
_curses.KEY_F43: 'kf43',
_curses.KEY_F44: 'kf44',
_curses.KEY_F45: 'kf45',
_curses.KEY_F46: 'kf46',
_curses.KEY_F47: 'kf47',
_curses.KEY_F48: 'kf48',
_curses.KEY_F49: 'kf49',
_curses.KEY_F5: 'kf5',
_curses.KEY_F50: 'kf50',
_curses.KEY_F51: 'kf51',
_curses.KEY_F52: 'kf52',
_curses.KEY_F53: 'kf53',
_curses.KEY_F54: 'kf54',
_curses.KEY_F55: 'kf55',
_curses.KEY_F56: 'kf56',
_curses.KEY_F57: 'kf57',
_curses.KEY_F58: 'kf58',
_curses.KEY_F59: 'kf59',
_curses.KEY_F6: 'kf6',
_curses.KEY_F60: 'kf60',
_curses.KEY_F61: 'kf61',
_curses.KEY_F62: 'kf62',
_curses.KEY_F63: 'kf63',
_curses.KEY_F7: 'kf7',
_curses.KEY_F8: 'kf8',
_curses.KEY_F9: 'kf9',
_curses.KEY_FIND: 'kfnd',
_curses.KEY_HELP: 'khlp',
_curses.KEY_HOME: 'khome',
_curses.KEY_IC: 'kich1',
_curses.KEY_IL: 'kil1',
_curses.KEY_LEFT: 'kcub1',
_curses.KEY_LL: 'kll',
_curses.KEY_MARK: 'kmrk',
_curses.KEY_MESSAGE: 'kmsg',
_curses.KEY_MOVE: 'kmov',
_curses.KEY_NEXT: 'knxt',
_curses.KEY_NPAGE: 'knp',
_curses.KEY_OPEN: 'kopn',
_curses.KEY_OPTIONS: 'kopt',
_curses.KEY_PPAGE: 'kpp',
_curses.KEY_PREVIOUS: 'kprv',
_curses.KEY_PRINT: 'kprt',
_curses.KEY_REDO: 'krdo',
_curses.KEY_REFERENCE: 'kref',
_curses.KEY_REFRESH: 'krfr',
_curses.KEY_REPLACE: 'krpl',
_curses.KEY_RESTART: 'krst',
_curses.KEY_RESUME: 'kres',
_curses.KEY_RIGHT: 'kcuf1',
_curses.KEY_SAVE: 'ksav',
_curses.KEY_SBEG: 'kBEG',
_curses.KEY_SCANCEL: 'kCAN',
_curses.KEY_SCOMMAND: 'kCMD',
_curses.KEY_SCOPY: 'kCPY',
_curses.KEY_SCREATE: 'kCRT',
_curses.KEY_SDC: 'kDC',
_curses.KEY_SDL: 'kDL',
_curses.KEY_SELECT: 'kslt',
_curses.KEY_SEND: 'kEND',
_curses.KEY_SEOL: 'kEOL',
_curses.KEY_SEXIT: 'kEXT',
_curses.KEY_SF: 'kind',
_curses.KEY_SFIND: 'kFND',
_curses.KEY_SHELP: 'kHLP',
_curses.KEY_SHOME: 'kHOM',
_curses.KEY_SIC: 'kIC',
_curses.KEY_SLEFT: 'kLFT',
_curses.KEY_SMESSAGE: 'kMSG',
_curses.KEY_SMOVE: 'kMOV',
_curses.KEY_SNEXT: 'kNXT',
_curses.KEY_SOPTIONS: 'kOPT',
_curses.KEY_SPREVIOUS: 'kPRV',
_curses.KEY_SPRINT: 'kPRT',
_curses.KEY_SR: 'kri',
_curses.KEY_SREDO: 'kRDO',
_curses.KEY_SREPLACE: 'kRPL',
_curses.KEY_SRIGHT: 'kRIT',
_curses.KEY_SRSUME: 'kRES',
_curses.KEY_SSAVE: 'kSAV',
_curses.KEY_SSUSPEND: 'kSPD',
_curses.KEY_STAB: 'khts',
_curses.KEY_SUNDO: 'kUND',
_curses.KEY_SUSPEND: 'kspd',
_curses.KEY_UNDO: 'kund',
_curses.KEY_UP: 'kcuu1'
}
def has_key(ch):
if isinstance(ch, str):
ch = ord(ch)
# Figure out the correct capability name for the keycode.
capability_name = _capability_names.get(ch)
if capability_name is None:
return False
    # Check the current terminal description for that capability;
    # if present, return True, else return False.
    if _curses.tigetstr(capability_name):
return True
else:
return False
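# Usage sketch (assumed): tigetstr() needs an initialized terminal, so call
# has_key() only after curses.initscr()/setupterm(), e.g.:
#
#     _curses.initscr()
#     has_key(_curses.KEY_LEFT)  # True iff the terminfo entry defines kcub1
#     _curses.endwin()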
if __name__ == '__main__':
# Compare the output of this implementation and the ncurses has_key,
# on platforms where has_key is already available
try:
L = []
_curses.initscr()
for key in _capability_names.keys():
system = _curses.has_key(key)
python = has_key(key)
if system != python:
L.append( 'Mismatch for key %s, system=%i, Python=%i'
% (_curses.keyname( key ), system, python) )
finally:
_curses.endwin()
for i in L: print(i)
|
etkirsch/legends-of-erukar
|
refs/heads/master
|
erukar/system/engine/overland/Sector.py
|
1
|
from erukar.system.engine import EnvironmentProfile, ErukarObject
from .Location import Location
import re
class Sector(ErukarObject):
def __init__(self, region, economic_seed_fn=None):
self.coordinates = ""
self.environment_profile = EnvironmentProfile()
self.region = region
self.adjacent_sectors = set()
self.locations = set()
self.name = 'Random Sector'
self.use_day_night_cycle = False
self.economic_profile = region.economic_profile\
if not economic_seed_fn\
else economic_seed_fn(self)
def alias(self):
return self.name
def set_coordinates(self, new_coords):
self.coordinates = Sector.autocorrect(new_coords)
def get_coordinates(self):
return self.coordinates
def adjacent(self):
for sector in self.adjacent_sectors:
yield sector
def neighbors(self):
return list(self.adjacent())
    def distance_to(self, sector):
        '''The sum of all coordinates adds up to zero. By taking the absolute
        value and summing them, you get twice the total distance between two coords.'''
        # Reconstructed from the docstring above (cube-coordinate distance);
        # assumes both sectors carry parseable coordinates.
        a = Sector.to_overland(self.get_coordinates())
        b = Sector.to_overland(sector.get_coordinates())
        return sum(abs(x - y) for x, y in zip(a, b)) // 2
def location(self):
if len(self.locations) > 0:
return list(self.locations)[0]
new_loc = Location(self)
new_loc.name = self.name
new_loc.environment_profile = self.environment_profile
self.locations.add(new_loc)
return new_loc
    def is_overland(coords):
        # Normalise any input to a space-free string before matching. (The
        # original `coords is not str` check compared against the type object
        # and was always true, so this preserves the effective behaviour.)
        coords = str(coords).replace(' ', '')
        return re.match(r'\(([-+]*\d+),([-+]*\d+),([-+]*\d+)\)', coords) is not None
def autocorrect(coord_string):
if Sector.is_overland(coord_string):
return Sector.to_overland(coord_string)
return coord_string
def to_overland(coords):
        out = coords
        if isinstance(coords, str):
            out = coords\
                .strip()\
                .replace(' ', '')\
                .replace('(', '')\
                .replace(')', '')\
                .split(',')
        elif not isinstance(coords, (tuple, list)):
            raise ValueError(
                'Malformed Overland Coordinates: Unable to parse a non-str '
                'non-list non-tuple input (received {})'.format(type(coords)))
if len(out) != 3:
raise ValueError('Malformed Overland Coordinates String: Received "{}", which returned "{}"'.format(coords, out))
return tuple(int(x) for x in out)
def supply_and_demand_scalar(self, good):
return self.economic_profile.supply_and_demand_scalar(good)
def register_transaction(self, good, at_price, supply_shift):
self.economic_profile.register_transaction(good, at_price, supply_shift)
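# Coordinate-parsing sketch (illustrative, not part of the engine): strings and
# sequences both normalise to an integer 3-tuple.
#
#     Sector.to_overland('(1, -2, 1)')  # -> (1, -2, 1)
#     Sector.to_overland([1, -2, 1])    # -> (1, -2, 1)
#     Sector.is_overland('(1, -2, 1)')  # -> True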
|
GregMilway/Exercism
|
refs/heads/master
|
python/binary/binary_test.py
|
4
|
"""Tests for the binary exercise
Implementation note:
If the argument to parse_binary isn't a valid binary number the
function should raise a ValueError with a meaningful error message.
"""
import unittest
from binary import parse_binary
class BinaryTests(unittest.TestCase):
def test_binary_1_is_decimal_1(self):
self.assertEqual(1, parse_binary("1"))
def test_binary_10_is_decimal_2(self):
self.assertEqual(2, parse_binary("10"))
def test_binary_11_is_decimal_3(self):
self.assertEqual(3, parse_binary("11"))
def test_binary_100_is_decimal_4(self):
self.assertEqual(4, parse_binary("100"))
def test_binary_1001_is_decimal_9(self):
self.assertEqual(9, parse_binary("1001"))
def test_binary_11010_is_decimal_26(self):
self.assertEqual(26, parse_binary("11010"))
def test_binary_10001101000_is_decimal_1128(self):
self.assertEqual(1128, parse_binary("10001101000"))
def test_invalid_binary_text_only(self):
self.assertRaises(ValueError, parse_binary, "carrot")
def test_invalid_binary_number_not_base2(self):
self.assertRaises(ValueError, parse_binary, "102011")
def test_invalid_binary_numbers_with_text(self):
self.assertRaises(ValueError, parse_binary, "10nope")
def test_invalid_binary_text_with_numbers(self):
self.assertRaises(ValueError, parse_binary, "nope10")
if __name__ == '__main__':
unittest.main()
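# A minimal parse_binary sketch that satisfies these tests (an assumption, not
# the exercise's reference solution):
#
#     def parse_binary(digits):
#         if not digits or set(digits) - {'0', '1'}:
#             raise ValueError('invalid binary string: %r' % (digits,))
#         return sum(int(d) << i for i, d in enumerate(reversed(digits)))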
|
aipescience/django-daiquiri
|
refs/heads/master
|
daiquiri/stats/migrations/0003_data_migration.py
|
1
|
from __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
QueryJob = apps.get_model('daiquiri_query', 'QueryJob')
Record = apps.get_model('daiquiri_stats', 'Record')
for record in Record.objects.all():
if record.resource_type == 'QUERY_JOB':
try:
job = QueryJob.objects.get(pk=record.resource['job_id'])
record.resource['query'] = job.query
record.resource['query_language'] = job.query_language
except QueryJob.DoesNotExist:
record.resource['job_id'] = None
record.resource['query'] = None
record.resource['query_language'] = None
record.resource_type = 'QUERY'
record.save()
class Migration(migrations.Migration):
dependencies = [
('daiquiri_stats', '0002_data_migration'),
('daiquiri_query', '0013_refactoring'),
]
operations = [
migrations.RunPython(run_data_migration)
]
|
julien78910/CouchPotatoServer
|
refs/heads/develop
|
libs/requests/packages/charade/langhebrewmodel.py
|
2762
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
Win1255HebrewModel = {
'charToOrderMap': win1255_CharToOrderMap,
'precedenceMatrix': HebrewLangModel,
'mTypicalPositiveRatio': 0.984004,
'keepEnglishLetter': False,
'charsetName': "windows-1255"
}
# flake8: noqa
|
zackmore/shadowsocks
|
refs/heads/master
|
utils/autoban.py
|
1033
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='See README')
parser.add_argument('-c', '--count', default=3, type=int,
help='with how many failure times it should be '
'considered as an attack')
config = parser.parse_args()
ips = {}
banned = set()
for line in sys.stdin:
if 'can not parse header when' in line:
ip = line.split()[-1].split(':')[0]
if ip not in ips:
ips[ip] = 1
print(ip)
sys.stdout.flush()
else:
ips[ip] += 1
if ip not in banned and ips[ip] >= config.count:
banned.add(ip)
cmd = 'iptables -A INPUT -s %s -j DROP' % ip
print(cmd, file=sys.stderr)
sys.stderr.flush()
os.system(cmd)
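# Invocation sketch (assumed, per the README reference above): pipe the
# shadowsocks server log into this script, e.g.
#
#     python autoban.py --count 5 < /var/log/shadowsocks.log
#
# Each offending IP is printed once, then dropped via iptables after `count`
# failed header parses.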
|
bqbn/addons-server
|
refs/heads/master
|
src/olympia/discovery/tests/test_models.py
|
1
|
from unittest import mock
from django.http import QueryDict
from django.test.utils import override_settings
from olympia import amo
from olympia.amo.tests import addon_factory, TestCase, user_factory
from olympia.discovery.models import DiscoveryItem
class TestDiscoveryItem(TestCase):
def test_heading_multiple_authors(self):
addon = addon_factory(slug=u'somé-slug', name=u'Sôme Name')
user1 = user_factory(display_name=u'Bàr')
addon.addonuser_set.create(user=user1, position=1)
user2 = user_factory(username=u'Fôo', id=345)
addon.addonuser_set.create(user=user2, position=2)
user3 = user_factory(username=u'Nôpe')
addon.addonuser_set.create(user=user3, listed=False)
item = DiscoveryItem.objects.create(
addon=addon,
custom_heading=(u'Fancy Héading {start_sub_heading}with '
u'{addon_name}{end_sub_heading}'))
assert item.heading == (
u'Fancy Héading <span>with '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'Sôme Name by Bàr, Firefox user 345</a></span>').format(
item.build_querystring())
def test_heading_custom(self):
addon = addon_factory(slug=u'somé-slug', name=u'Sôme Name')
user = user_factory(display_name=u'Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon,
custom_heading=(u'Fancy Héading {start_sub_heading}with '
u'{addon_name}{end_sub_heading}'))
assert item.heading == (
u'Fancy Héading <span>with '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'Sôme Name by Fløp</a></span>').format(item.build_querystring())
def test_heading_custom_xss(self):
        # Custom heading itself should not contain HTML; only the special {xxx}
        # tags we explicitly support.
addon = addon_factory(
slug=u'somé-slug', name=u'<script>alert(42)</script>')
user = user_factory(display_name=u'<script>alert(666)</script>')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon,
custom_heading=u'<script>alert(0)</script>{addon_name}')
assert item.heading == (
u'<script>alert(0)</script>'
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'<script>alert(42)</script> '
u'by <script>alert(666)</script></a>').format(
item.build_querystring())
def test_heading_non_custom(self):
addon = addon_factory(slug=u'somé-slug', name=u'Sôme Name')
addon.addonuser_set.create(user=user_factory(display_name=u'Fløp'))
item = DiscoveryItem.objects.create(addon=addon)
assert item.heading == (
u'Sôme Name <span>by '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'Fløp</a></span>').format(item.build_querystring())
def test_heading_non_custom_xss(self):
addon = addon_factory(
slug=u'somé-slug', name=u'<script>alert(43)</script>')
user = user_factory(display_name=u'<script>alert(667)</script>')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(addon=addon)
assert item.heading == (
u'<script>alert(43)</script> <span>by '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'<script>alert(667)</script></a></span>').format(
item.build_querystring())
def test_heading_custom_with_custom_addon_name(self):
addon = addon_factory(slug=u'somé-slug')
addon.addonuser_set.create(user=user_factory(display_name=u'Fløp'))
item = DiscoveryItem.objects.create(
addon=addon, custom_addon_name=u'Custôm Name',
custom_heading=(u'Fancy Héading {start_sub_heading}with '
u'{addon_name}{end_sub_heading}'))
assert item.heading == (
u'Fancy Héading <span>with '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'Custôm Name by Fløp</a></span>').format(item.build_querystring())
def test_heading_custom_with_custom_addon_name_xss(self):
addon = addon_factory(slug=u'somé-slug')
user = user_factory(display_name=u'<script>alert(668)</script>')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon, custom_addon_name=u'Custôm Name',
custom_heading=(u'Fancy Héading {start_sub_heading}with '
u'{addon_name}{end_sub_heading}'))
item.custom_addon_name = '<script>alert(2)</script>'
item.custom_heading = '<script>alert(2)</script>{addon_name}'
assert item.heading == (
u'<script>alert(2)</script>'
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'<script>alert(2)</script> '
u'by <script>alert(668)</script></a>').format(
item.build_querystring())
def test_heading_non_custom_but_with_custom_addon_name(self):
addon = addon_factory(slug=u'somé-slug')
addon.addonuser_set.create(user=user_factory(display_name=u'Fløp'))
item = DiscoveryItem.objects.create(
addon=addon, custom_addon_name=u'Custôm Name')
assert item.heading == (
u'Custôm Name <span>by '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'Fløp</a></span>').format(item.build_querystring())
def test_heading_non_custom_but_with_custom_addon_name_xss(self):
addon = addon_factory(slug=u'somé-slug')
user = user_factory(display_name=u'<script>alert(669)</script>')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon, custom_addon_name=u'<script>alert(3)</script>')
assert item.heading == (
u'<script>alert(3)</script> <span>by '
u'<a href="http://testserver/en-US/firefox/addon/som%C3%A9-slug/'
u'?{}">'
u'<script>alert(669)</script></a></span>').format(
item.build_querystring())
def test_heading_text(self):
addon = addon_factory(slug='somé-slug', name='Sôme Name')
user = user_factory(display_name='Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(addon=addon)
assert item.heading_text == 'Sôme Name'
def test_heading_text_custom_addon_name(self):
addon = addon_factory(slug='somé-slug', name='Sôme Name')
user = user_factory(display_name='Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon, custom_addon_name='Custôm Name')
assert item.heading_text == 'Custôm Name'
def test_heading_text_custom(self):
addon = addon_factory(slug='somé-slug', name=u'Sôme Name')
user = user_factory(display_name='Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon,
custom_heading=('Fancy Héading {start_sub_heading}with '
'{addon_name}{end_sub_heading}.'))
assert item.heading_text == 'Fancy Héading with Sôme Name.'
def test_heading_text_custom_with_custom_addon_name(self):
addon = addon_factory(slug='somé-slug', name='Sôme Name')
user = user_factory(display_name='Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon,
custom_addon_name='Custôm Name',
custom_heading=('Fancy Héading {start_sub_heading}with '
'{addon_name}{end_sub_heading}.'))
assert item.heading_text == 'Fancy Héading with Custôm Name.'
def test_heading_is_translated(self):
addon = addon_factory(slug='somé-slug', name='Sôme Name')
user = user_factory(display_name='Fløp')
addon.addonuser_set.create(user=user)
item = DiscoveryItem.objects.create(
addon=addon,
custom_addon_name='Custôm Name',
custom_heading=('Fancy Héading {start_sub_heading}with '
'{addon_name}{end_sub_heading}.'))
with mock.patch('olympia.discovery.models.ugettext') as ugettext_mock:
ugettext_mock.return_value = f'Trans {item.custom_heading}'
assert item.heading_text == 'Trans Fancy Héading with Custôm Name.'
assert item.heading.startswith('Trans Fancy Héading <span>with ')
def test_description_custom(self):
addon = addon_factory(summary='Foo', description='Bar')
item = DiscoveryItem.objects.create(
addon=addon, custom_description=u'Custôm Desc')
assert item.description == u'<blockquote>Custôm Desc</blockquote>'
item.custom_description = u'û<script>alert(4)</script>'
assert item.description == (
u'<blockquote>û<script>alert(4)</script></blockquote>')
def test_description_non_custom_extension(self):
addon = addon_factory(summary='')
item = DiscoveryItem.objects.create(addon=addon)
assert item.description == u''
addon.summary = u'Mÿ Summary'
assert item.description == u'<blockquote>Mÿ Summary</blockquote>'
def test_description_non_custom_extension_xss(self):
addon = addon_factory(summary=u'Mÿ <script>alert(5)</script>')
item = DiscoveryItem.objects.create(addon=addon)
assert item.description == (
u'<blockquote>'
u'Mÿ <script>alert(5)</script></blockquote>')
def test_description_non_custom_fallback(self):
item = DiscoveryItem.objects.create(addon=addon_factory(
type=amo.ADDON_DICT))
assert item.description == u''
def test_description_text_custom(self):
addon = addon_factory(summary='Foo', description='Bar')
item = DiscoveryItem.objects.create(
addon=addon, custom_description='Custôm Desc.')
assert item.description_text == 'Custôm Desc.'
def test_description_text_non_custom_extension(self):
addon = addon_factory(summary='')
item = DiscoveryItem.objects.create(addon=addon)
assert item.description_text == ''
addon.summary = 'Mÿ Summary'
assert item.description_text == 'Mÿ Summary'
def test_description_text_non_custom_fallback(self):
item = DiscoveryItem.objects.create(addon=addon_factory(
type=amo.ADDON_DICT))
assert item.description_text == ''
@override_settings(DOMAIN='addons.mozilla.org')
def test_build_querystring(self):
item = DiscoveryItem.objects.create(addon=addon_factory(
type=amo.ADDON_DICT))
# We do not use `urlencode()` and a string comparison because QueryDict
# does not preserve ordering.
q = QueryDict(item.build_querystring())
assert q.get('utm_source') == 'discovery.addons.mozilla.org'
assert q.get('utm_medium') == 'firefox-browser'
assert q.get('utm_content') == 'discopane-entry-link'
assert q.get('src') == 'api'
|
agvergara/Python
|
refs/heads/master
|
X-Serv-18.2-Practica2/project/manage.py
|
404
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
lucernae/inasafe
|
refs/heads/develop
|
safe/gui/tools/wizard/wizard_strings.py
|
8
|
# coding=utf-8
"""Wizard Strings."""
from safe.utilities.i18n import tr
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
category_question = tr(
'By following the simple steps in this wizard, you can assign '
'keywords to your layer: <b>%s</b>. First you need to define the purpose '
'of your layer. Is it a <b>hazard</b>, <b>exposure</b>, or '
'<b>aggregation</b> layer? ') # (layer name)
category_question_hazard = tr(
'You have selected a layer that needs to have keywords assigned or '
'updated. In the next steps you can assign keywords to that layer. '
'First you need to confirm the layer is a <b>hazard</b> layer.')
category_question_exposure = tr(
'You have selected a layer that needs to have keywords assigned or '
'updated. In the next steps you can assign keywords to that layer. '
    'First you need to confirm the layer is an <b>exposure</b> layer.')
category_question_aggregation = tr(
'You have selected a layer that needs to have keywords assigned or '
'updated. In the next steps you can assign keywords to that layer. '
'First you need to confirm the layer is an <b>aggregation</b> layer.')
hazard_category_question = tr(
'What type of <b>hazard scenario</b> does this layer represent? '
'<p>Does it represent a <b>single event</b> or <b>multiple events</b>?'
'</p>')
hazard_question = tr(
'What kind of <b>hazard</b> does this layer represent? '
'<p>The choice you make here will determine which impact functions this '
'hazard layer can be used with. For example, if you choose <b>flood</b> '
'you will be able to use this hazard layer with impact functions such '
'as <b>flood impact on population</b>.</p>')
exposure_question = tr(
'What kind of <b>exposure</b> does this layer represent? '
'Is it a <b>population</b>, <b>structure</b>, <b>area</b>, or '
'<b>road</b> layer? '
'<p>The choice you make here will determine '
'which impact functions this exposure layer can be used with. '
'For example, if you choose <b>population</b> you will be able to use '
'this exposure layer with impact functions such as <b>flood impact on '
'population</b>.</p>')
layer_mode_raster_question = tr(
'You have selected <b>%s %s</b> for this raster layer. '
'<p>We need to know whether each cell in this raster represents '
'<b>continuous</b> data or if the data have been <b>classified</b>.</p>'
) # (subcategory, layer purpose)
layer_mode_vector_question = tr(
'You have selected <b>%s</b> for this <b>%s</b> layer. '
    '<p>We need to know whether the attribute values represent <b>continuous</b> '
'data or if the data have been <b>classified</b>.</p>'
) # (subcategory, layer purpose)
layer_mode_vector_classified_confirm = tr(
'You have selected <b>%s</b> for this <b>%s</b> layer. '
'<p>We need to confirm that attribute values in this vector layer have '
'been <b>classified</b> and are represented by a code.</p>'
) # (subcategory, layer purpose)
layer_mode_vector_continuous_confirm = tr(
'You have selected <b>%s</b> for this <b>%s</b> layer. '
    '<p>We need to confirm that the attribute values represent '
'<b>continuous</b> data.</p>') # (subcategory, layer purpose)
unit_question = tr(
'You have selected <b>%s</b> for this <b>%s</b> layer type. '
'<p>We need to know what units the continuous data are in. For example in '
'a raster layer, each cell might represent depth in metres or depth in '
'feet.</p>') # (subcategory, layer purpose)
flood_metres_question = tr('flood depth in metres')
flood_feet_question = tr('flood depth in feet')
flood_wetdry_question = tr('flood extent as wet/dry')
tsunami_metres_question = tr('tsunami depth in metres')
tsunami_feet_depth_question = tr('tsunami depth in feet')
tsunami_wetdry_question = tr('tsunami extent as wet/dry')
earthquake_mmi_question = tr('earthquake intensity in MMI')
tephra_kgm2_question = tr('tephra intensity in kg/m<sup>2</sup>')
volcano_volcano_categorical_question = tr('volcano hazard categorical level')
cyclone_kilometres_per_hour_question = tr('wind speed in km/h')
cyclone_miles_per_hour_question = tr('wind speed in mph')
cyclone_knots_question = tr('wind speed in kn')
population_count_question = tr('the number of people')
population_density_question = tr('people density in people/km<sup>2</sup>')
road_road_type_question = tr('type for your road')
structure_building_type_question = tr('type for your building')
field_question_subcategory_unit = tr(
'You have selected a <b>%s %s</b> for the vector layer measured in '
'<b>%s</b>. Please select the attribute in this layer that represents %s.'
) # (layer purpose, subcategory, unit, subcategory-unit relation)
field_question_subcategory_classified = tr(
'You have selected <b>classified</b> data for the vector <b>%s</b> layer. '
'Please select the attribute in this layer that represents the <b>%s</b> '
'classes.'
)
field_question_population_field = tr(
'You have selected <b>place</b> as an exposure input for the vector '
'<b>%s</b> layer. Please select the attribute in this layer that '
    'represents the population. If you do not have one, leave this step empty '
    'and click the next button.'
)
field_question_name_field = tr(
'You have selected <b>place</b> as an exposure input for the vector '
'<b>%s</b> layer. Please select the attribute in this layer that '
'represents the name.'
)
field_question_subcategory_classified_id = tr(
'You have selected <b>classified</b> data for the vector <b>%s</b> layer. '
'Please select the attribute in this layer that represents the ids.'
) # (layer purpose, subcategory)
field_question_aggregation = tr(
'You have selected a vector <b>aggregation</b> layer. Please select the '
'attribute in this layer that has the names of the aggregation areas.')
classification_question = tr(
'You have selected <b>%s %s</b> for this classified data. '
'Please select the type of classification you want to use. '
) # (subcategory, layer purpose)
classify_vector_question = tr(
'You have selected <b>%s %s</b> classified by <b>%s</b>, '
'and the attribute is <b>%s</b>. '
'Please drag unique values from the list on the left '
'into the panel on the right and place them in the appropriate categories.'
) # (subcategory, layer purpose, classification, field)
classify_raster_question = tr(
'You have selected <b>%s %s</b> classified by <b>%s</b>, '
'for the raster layer. '
'Please drag unique values from the list on the left '
'into the panel on the right and place them in the appropriate categories.'
) # (subcategory, layer purpose, classification)
continuous_vector_question = tr(
'You have selected <b>%s %s</b> as a <b>continuous</b> layer and the '
'attribute is <b>%s</b> with <b>%s</b>. '
'Please input the minimum and maximum value for each class below. '
'Minimum value will be excluded in the range, while maximum value will be '
'included. The minimum value from the layer is <b>%s</b> and the maximum '
'value is <b>%s</b>.'
) # (subcategory, purpose, field, classification, min value, max value)
continuous_raster_question = tr(
'You have selected <b>%s %s</b> as a <b>continuous</b> layer with '
    '<b>%s</b>. Minimum value will be excluded from the range, while maximum '
'value will be included. Please input the minimum and maximum value for '
'each class below. The minimum value from the layer is <b>%s</b> and the '
'maximum value is <b>%s</b>.'
) # (subcategory, purpose, classification, min value, max value)
select_function_constraints2_question = tr(
'You selected <b>%s</b> hazard and <b>%s</b> exposure. Now, select the '
'<b>geometry types</b> for the hazard and exposure layers you want to '
'use. Click on the cell in the table below that matches '
'the geometry type for each.') # (hazard, exposure)
select_function_question = tr(
'<p>You have selected <b>%s %s</b> hazard and <b>%s %s</b> exposure. '
'Below you can see a list of available <b>impact functions</b> matching '
'the selected hazard, exposure and their geometries. Please choose which '
'impact function you would like to use from the list below.</p> '
'<p>Please note some functions may require either continuous or '
'classified input data. A <b>continuous</b> raster is one where cell '
'values are real data values such as: depth of flood water in metres or '
'the number of people per cell. A <b>classified</b> raster is one where '
'cell values represent classes or zones such as: high hazard zone, '
'medium hazard zone, low hazard zone.</p>'
) # (haz_geom, haz, expo_geom, exp)
select_hazard_origin_question = tr(
'<p>You selected <b>%s %s</b> as hazard input.</p> '
'<p>Please help us to find your <b>hazard</b> layer. A hazard layer '
'represents something that will impact the people or infrastructure '
'in an area. '
'For example flood, earthquake and tsunami inundation are all different '
'kinds of hazards. Select the appropriate option below to indicate '
'where your data resides:</p>') # (hazard_geom, hazard)
select_hazlayer_from_canvas_question = tr(
'<p>You selected <b>%s %s</b> as hazard input.</p> '
'<p>These are suitable layers currently loaded in QGIS. Please choose '
'the hazard layer that you would like to use for your assessment.</p>'
) # (hazard_geom, hazard)
select_hazlayer_from_browser_question = tr(
'<p>You selected <b>%s %s</b> as hazard input.</p> '
'<p>Please choose the hazard layer that you would like to use '
'for your assessment.</p>') # (exposure_geom, exposure)
select_exposure_origin_question = tr(
'<p>You selected <b>%s %s</b> as exposure input.</p>'
'<p>Please help us to find your <b>exposure</b> layer. An exposure layer '
'represents people, property or infrastructure that may be affected in '
'the event of a flood, earthquake, volcano etc. Select an appropriate '
'option below to indicate where your data can be found:</p>'
) # (exposure_geom, exposure)
select_explayer_from_canvas_question = tr(
'<p>You selected <b>%s %s</b> as exposure input.</p>'
'<p>These are suitable layers currently loaded in QGIS. Please choose '
'the exposure layer that you would like to use for your '
'assessment.</p>') # (exposure_geom, exposure)
select_explayer_from_browser_question = tr(
'<p>You selected <b>%s %s</b> as exposure input</p>'
'<p>Please choose the exposure layer that you would like to use '
'for your assessment.</p>') # (exposure_geom, exposure)
create_postGIS_connection_first = tr(
'<html>In order to use PostGIS layers, please close the wizard, '
'create a new PostGIS connection and run the wizard again. <br/><br/> '
'You can manage connections under the '
'<i>Layer</i> > <i>Add Layer</i> > <i>Add PostGIS Layers</i> '
'menu.</html>')
multiple_classified_hazard_classifications_vector = tr(
# (subcategory, layer purpose, field)
    'You have selected <b>%s %s</b> and attribute <b>%s</b>. Please select '
    'hazard classifications for each exposure type. If you want to edit the '
    'value mapping, you can click the edit button next to each one and do the '
    'value mapping in the right panel. Do not forget to save the value '
    'mapping before you continue to the next step.')
multiple_classified_hazard_classifications_raster = tr(
# (subcategory, layer purpose)
    'You have selected <b>%s %s</b>. Please select hazard classifications for '
    'each exposure type. If you want to edit the value mapping, you can click '
    'the edit button next to each one and do the value mapping in the right '
    'panel. Do not forget to save the value mapping before you continue to '
    'the next step.')
multiple_continuous_hazard_classifications_vector = tr(
# (subcategory, layer purpose, field)
    'You have selected <b>%s %s</b> and attribute <b>%s</b>. Please select '
    'hazard classifications for each exposure type. If you want to edit the '
    'thresholds, you can click the edit button next to each one and edit the '
    'thresholds in the right panel. Do not forget to save the thresholds '
    'before you continue to the next step.')
multiple_continuous_hazard_classifications_raster = tr(
# (subcategory, layer purpose)
    'You have selected <b>%s %s</b>. Please select hazard classifications for '
    'each exposure type. If you want to edit the thresholds, you can click '
    'the edit button next to each one and edit the thresholds in the right '
    'panel. Do not forget to save the thresholds before you continue to the '
    'next step.')
|
syncloud/platform
|
refs/heads/master
|
src/test/disks/test_disk.py
|
1
|
from syncloudlib import logger
from syncloud_platform.disks.lsblk import Disk, Partition
logger.init(console=True)
def test_find_root_partition_some():
disk = Disk('disk', '/dev/sda', 20, [
Partition(10, '/dev/sda1', '/', True, 'ext4', False),
Partition(10, '/dev/sda2', '', True, 'ext4', True)
])
assert disk.find_root_partition().device == '/dev/sda1'
def test_find_root_partition_none():
disk = Disk('disk', '/dev/sda', 20, [
Partition(10, '/dev/sda1', '/my', True, 'ext4', False),
Partition(10, '/dev/sda2', '', True, 'ext4', True)
])
assert disk.find_root_partition() is None
|
bcheung92/Paperproject
|
refs/heads/master
|
gem5/tests/configs/realview-simple-timing.py
|
52
|
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
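# Build a single-core ARM Linux full-system configuration that uses
# timing-mode memory accesses, DDR3-1600 DRAM and a TimingSimpleCPU,
# then create the simulation root.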
root = LinuxArmFSSystemUniprocessor(mem_mode='timing',
mem_class=DDR3_1600_x64,
cpu_class=TimingSimpleCPU).create_root()
|
eschleicher/flask_shopping_list
|
refs/heads/master
|
venv/lib/python3.4/site-packages/setuptools/tests/contexts.py
|
73
|
import tempfile
import os
import shutil
import sys
import contextlib
import site
from ..compat import StringIO
@contextlib.contextmanager
def tempdir(cd=lambda dir:None, **kwargs):
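    """
    Create a temporary directory and yield it, removing it afterwards.
    ``cd`` is invoked with the new directory on entry and with the
    original working directory on exit.
    """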
temp_dir = tempfile.mkdtemp(**kwargs)
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
@contextlib.contextmanager
def environment(**replacements):
"""
In a context, patch the environment with replacements. Pass None values
to clear the values.
"""
saved = dict(
(key, os.environ[key])
for key in replacements
if key in os.environ
)
# remove values that are null
remove = (key for (key, value) in replacements.items() if value is None)
for key in list(remove):
os.environ.pop(key, None)
replacements.pop(key)
os.environ.update(replacements)
try:
yield saved
finally:
for key in replacements:
os.environ.pop(key, None)
os.environ.update(saved)
@contextlib.contextmanager
def argv(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
@contextlib.contextmanager
def quiet():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = StringIO()
new_stderr = sys.stderr = StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr
@contextlib.contextmanager
def save_user_site_setting():
saved = site.ENABLE_USER_SITE
try:
yield saved
finally:
site.ENABLE_USER_SITE = saved
@contextlib.contextmanager
def suppress_exceptions(*excs):
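    """Silently swallow any of the given exception types raised in the block."""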
try:
yield
except excs:
pass
|
Distrotech/intellij-community
|
refs/heads/master
|
python/testData/psi/SetLiteral.py
|
83
|
{1, 2}
{1}
|
acsone/account-analytic
|
refs/heads/8.0
|
stock_analytic/__init__.py
|
2
|
# -*- coding: utf-8 -*-
# Copyright 2013 Julius Network Solutions
# Copyright 2015 Clear Corp
# Copyright 2016 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import models
|
dincamihai/django-allauth
|
refs/heads/master
|
allauth/tests.py
|
38
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import requests
from datetime import datetime, date
import django
from django.test import TestCase
from django.db import models
from . import utils
class MockedResponse(object):
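    """Minimal stand-in for a requests.Response object."""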
def __init__(self, status_code, content, headers=None):
if headers is None:
headers = {}
self.status_code = status_code
self.content = content.encode('utf8')
self.headers = headers
def json(self):
import json
return json.loads(self.text)
def raise_for_status(self):
pass
@property
def text(self):
return self.content.decode('utf8')
class mocked_response:
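    """
    Context manager that monkey-patches requests.get/post/request so the
    queued responses are returned (in order) instead of real HTTP calls;
    the original functions are restored on exit.
    """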
def __init__(self, *responses):
self.responses = list(responses)
def __enter__(self):
self.orig_get = requests.get
self.orig_post = requests.post
self.orig_request = requests.request
def mockable_request(f):
def new_f(*args, **kwargs):
if self.responses:
return self.responses.pop(0)
return f(*args, **kwargs)
return new_f
requests.get = mockable_request(requests.get)
requests.post = mockable_request(requests.post)
requests.request = mockable_request(requests.request)
def __exit__(self, type, value, traceback):
requests.get = self.orig_get
requests.post = self.orig_post
requests.request = self.orig_request
class BasicTests(TestCase):
def test_generate_unique_username(self):
examples = [('[email protected]', 'a.b-c'),
('Üsêrnamê', 'username'),
('User Name', 'user_name'),
('', 'user')]
for input, username in examples:
self.assertEqual(utils.generate_unique_username([input]),
username)
def test_email_validation(self):
is_email_max_75 = django.VERSION[:2] <= (1, 7)
if is_email_max_75:
s = 'unfortunately.django.user.email.max_length.is.set.to.75.which.is.too.short@bummer.com' # noqa
self.assertEqual(None, utils.valid_email_or_none(s))
s = 'this.email.address.is.a.bit.too.long.but.should.still.validate.ok@short.com' # noqa
self.assertEqual(s, utils.valid_email_or_none(s))
if is_email_max_75:
s = 'x' + s
self.assertEqual(None, utils.valid_email_or_none(s))
self.assertEqual(None, utils.valid_email_or_none("Bad ?"))
def test_serializer(self):
class SomeModel(models.Model):
dt = models.DateTimeField()
t = models.TimeField()
d = models.DateField()
def method(self):
pass
instance = SomeModel(dt=datetime.now(),
d=date.today(),
t=datetime.now().time())
# make sure serializer doesn't fail if a method is attached to
# the instance
instance.method = method
instance.nonfield = 'hello'
data = utils.serialize_instance(instance)
instance2 = utils.deserialize_instance(SomeModel, data)
self.assertEqual(getattr(instance, 'method', None), method)
self.assertEqual(getattr(instance2, 'method', None), None)
self.assertEqual(instance.nonfield, instance2.nonfield)
self.assertEqual(instance.d, instance2.d)
self.assertEqual(instance.dt.date(), instance2.dt.date())
for t1, t2 in [(instance.t, instance2.t),
(instance.dt.time(), instance2.dt.time())]:
self.assertEqual(t1.hour, t2.hour)
self.assertEqual(t1.minute, t2.minute)
self.assertEqual(t1.second, t2.second)
# AssertionError: datetime.time(10, 6, 28, 705776)
# != datetime.time(10, 6, 28, 705000)
self.assertEqual(int(t1.microsecond / 1000),
int(t2.microsecond / 1000))
def test_serializer_binary_field(self):
class SomeBinaryModel(models.Model):
bb = models.BinaryField()
bb_empty = models.BinaryField()
instance = SomeBinaryModel(bb=b'some binary data')
serialized = utils.serialize_instance(instance)
deserialized = utils.deserialize_instance(SomeBinaryModel, serialized)
self.assertEqual(serialized['bb'], 'c29tZSBiaW5hcnkgZGF0YQ==')
self.assertEqual(serialized['bb_empty'], '')
self.assertEqual(deserialized.bb, b'some binary data')
self.assertEqual(deserialized.bb_empty, b'')
def test_build_absolute_uri(self):
self.assertEqual(
utils.build_absolute_uri(None, '/foo'),
'http://example.com/foo')
self.assertEqual(
utils.build_absolute_uri(None, '/foo', protocol='ftp'),
'ftp://example.com/foo')
self.assertEqual(
utils.build_absolute_uri(None, 'http://foo.com/bar'),
'http://foo.com/bar')
|
EmreAtes/spack
|
refs/heads/develop
|
var/spack/repos/builtin/packages/r-mgcv/package.py
|
5
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RMgcv(RPackage):
"""GAMs, GAMMs and other generalized ridge regression with multiple
smoothing parameter estimation by GCV, REML or UBRE/AIC. Includes a gam()
function, a wide variety of smoothers, JAGS support and distributions
beyond the exponential family."""
homepage = "https://cran.r-project.org/package=mgcv"
url = "https://cran.r-project.org/src/contrib/mgcv_1.8-16.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/mgcv"
version('1.8-22', 'b42079b33b46de784f293a74c824b877')
version('1.8-21', 'aae8262a07c8698ca8d6213065c4983f')
version('1.8-20', '58eb94404aad7ff8a0cf11a2f098f8bf')
version('1.8-19', 'f9a4e29464f4d10b7b2cb9d0bec3fa9e')
version('1.8-18', 'c134fc2db253530233b95f2e36b56a2f')
version('1.8-17', '398582d0f999ac34749f4f5f1d103f75')
version('1.8-16', '4c1d85e0f80b017bccb4b63395842911')
version('1.8-13', '30607be3aaf44b13bd8c81fc32e8c984')
depends_on('r-nlme', type=('build', 'run'))
depends_on('r-matrix', type=('build', 'run'))
|
buptlsl/learn-python3
|
refs/heads/master
|
samples/module/hello.py
|
20
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
' a test module '
__author__ = 'Michael Liao'
import sys
def test():
    args = sys.argv
    if len(args) == 1:
        print('Hello, world!')
    elif len(args) == 2:
        print('Hello, %s!' % args[1])
    else:
        print('Too many arguments!')
if __name__ == '__main__':
    test()
|
joesonw/shadowsocks
|
refs/heads/master
|
tests/graceful_cli.py
|
977
|
#!/usr/bin/python
import socks
import time
SERVER_IP = '127.0.0.1'
SERVER_PORT = 8001
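# Connect to the test server through the local SOCKS5 proxy on port 1081,
# send a single payload, then hold the connection open for 30 seconds so
# that a graceful proxy shutdown can be exercised.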
if __name__ == '__main__':
s = socks.socksocket()
s.set_proxy(socks.SOCKS5, SERVER_IP, 1081)
s.connect((SERVER_IP, SERVER_PORT))
s.send(b'test')
time.sleep(30)
s.close()
|
HyperBaton/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/cloudengine/ce_bgp_neighbor.py
|
13
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_bgp_neighbor
version_added: "2.4"
short_description: Manages BGP peer configuration on HUAWEI CloudEngine switches.
description:
- Manages BGP peer configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
notes:
- This module requires the netconf system service be enabled on the remote device being managed.
- Recommended connection is C(netconf).
- This module also works with C(local) connections for legacy playbooks.
options:
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent']
vrf_name:
description:
- Name of a BGP instance. The name is a case-sensitive string of characters.
The BGP instance can be used only after the corresponding VPN instance is created.
required: true
peer_addr:
description:
- Connection address of a peer, which can be an IPv4 or IPv6 address.
required: true
remote_as:
description:
- AS number of a peer.
The value is a string of 1 to 11 characters.
required: true
description:
description:
- Description of a peer, which can be letters or digits.
The value is a string of 1 to 80 characters.
fake_as:
description:
- Fake AS number that is specified for a local peer.
The value is a string of 1 to 11 characters.
dual_as:
description:
- If the value is true, the EBGP peer can use either a fake AS number or the actual AS number.
If the value is false, the EBGP peer can only use a fake AS number.
choices: ['no_use','true','false']
default: no_use
conventional:
description:
- If the value is true, the router has all extended capabilities.
If the value is false, the router does not have all extended capabilities.
choices: ['no_use','true','false']
default: no_use
route_refresh:
description:
- If the value is true, BGP is enabled to advertise REFRESH packets.
              If the value is false, the route refresh function is disabled.
choices: ['no_use','true','false']
default: no_use
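    four_byte_as:
        description:
            - If the value is true, the 4-byte-AS capability is advertised to the peer.
              If the value is false, the 4-byte-AS capability is not advertised.
        choices: ['no_use','true','false']
        default: no_use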
is_ignore:
description:
- If the value is true, the session with a specified peer is torn down and all related
routing entries are cleared.
If the value is false, the session with a specified peer is retained.
choices: ['no_use','true','false']
default: no_use
local_if_name:
description:
- Name of a source interface that sends BGP packets.
The value is a string of 1 to 63 characters.
ebgp_max_hop:
description:
- Maximum number of hops in an indirect EBGP connection.
              The value is an integer ranging from 1 to 255.
valid_ttl_hops:
description:
- Enable GTSM on a peer or peer group.
The valid-TTL-Value parameter is used to specify the number of TTL hops to be detected.
The value is an integer ranging from 1 to 255.
connect_mode:
description:
- The value can be Connect-only, Listen-only, or Both.
is_log_change:
description:
- If the value is true, BGP is enabled to record peer session status and event information.
If the value is false, BGP is disabled from recording peer session status and event information.
choices: ['no_use','true','false']
default: no_use
pswd_type:
description:
- Enable BGP peers to establish a TCP connection and perform the Message Digest 5 (MD5)
authentication for BGP messages.
choices: ['null','cipher','simple']
pswd_cipher_text:
description:
            - The character string of the password. Spaces are not supported.
The value is a string of 1 to 255 characters.
keep_alive_time:
description:
- Specify the Keepalive time of a peer or peer group.
The value is an integer ranging from 0 to 21845. The default value is 60.
hold_time:
description:
- Specify the Hold time of a peer or peer group.
The value is 0 or an integer ranging from 3 to 65535.
min_hold_time:
description:
            - Specify the Min hold time of a peer or peer group.
              The value is 0 or an integer ranging from 20 to 65535.
key_chain_name:
description:
- Specify the Keychain authentication name used when BGP peers establish a TCP connection.
The value is a string of 1 to 47 case-insensitive characters.
conn_retry_time:
description:
- ConnectRetry interval.
The value is an integer ranging from 1 to 65535.
tcp_MSS:
description:
- Maximum TCP MSS value used for TCP connection establishment for a peer.
The value is an integer ranging from 176 to 4096.
mpls_local_ifnet_disable:
description:
            - If the value is true, creation of MPLS Local IFNET for the peer is disabled.
              If the value is false, creation of MPLS Local IFNET for the peer is enabled.
choices: ['no_use','true','false']
default: no_use
prepend_global_as:
description:
- Add the global AS number to the Update packets to be advertised.
choices: ['no_use','true','false']
default: no_use
prepend_fake_as:
description:
- Add the Fake AS number to received Update packets.
choices: ['no_use','true','false']
default: no_use
is_bfd_block:
description:
            - If the value is true, peers are enabled to inherit the BFD function from the peer group.
              If the value is false, peers are prevented from inheriting the BFD function from the peer group.
choices: ['no_use','true','false']
default: no_use
multiplier:
description:
- Specify the detection multiplier. The default value is 3.
The value is an integer ranging from 3 to 50.
is_bfd_enable:
description:
- If the value is true, BFD is enabled.
If the value is false, BFD is disabled.
choices: ['no_use','true','false']
default: no_use
rx_interval:
description:
- Specify the minimum interval at which BFD packets are received.
The value is an integer ranging from 50 to 1000, in milliseconds.
tx_interval:
description:
- Specify the minimum interval at which BFD packets are sent.
The value is an integer ranging from 50 to 1000, in milliseconds.
is_single_hop:
description:
- If the value is true, the system is enabled to preferentially use the single-hop mode for
BFD session setup between IBGP peers.
If the value is false, the system is disabled from preferentially using the single-hop
mode for BFD session setup between IBGP peers.
choices: ['no_use','true','false']
default: no_use
'''
EXAMPLES = '''
- name: CloudEngine BGP neighbor test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config bgp peer"
ce_bgp_neighbor:
state: present
vrf_name: js
peer_addr: 192.168.10.10
remote_as: 500
provider: "{{ cli }}"
- name: "Config bgp route id"
ce_bgp_neighbor:
state: absent
vrf_name: js
peer_addr: 192.168.10.10
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"peer_addr": "192.168.10.10", "remote_as": "500", "state": "present", "vrf_name": "js"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {"bgp peer": []}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"bgp peer": [["192.168.10.10", "500"]]}
updates:
description: command sent to the device
returned: always
type: list
sample: ["peer 192.168.10.10 as-number 500"]
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec, check_ip_addr
# get bgp peer
CE_GET_BGP_PEER_HEADER = """
<filter type="subtree">
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer>
<peerAddr>%s</peerAddr>
"""
CE_GET_BGP_PEER_TAIL = """
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</filter>
"""
# merge bgp peer
CE_MERGE_BGP_PEER_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer operation="merge">
<peerAddr>%s</peerAddr>
"""
CE_MERGE_BGP_PEER_TAIL = """
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# create bgp peer
CE_CREATE_BGP_PEER_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer operation="create">
<peerAddr>%s</peerAddr>
"""
CE_CREATE_BGP_PEER_TAIL = """
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# delete bgp peer
CE_DELETE_BGP_PEER_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer operation="delete">
<peerAddr>%s</peerAddr>
"""
CE_DELETE_BGP_PEER_TAIL = """
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# get peer bfd
CE_GET_PEER_BFD_HEADER = """
<filter type="subtree">
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer>
<peerAddr>%s</peerAddr>
<peerBfd>
"""
CE_GET_PEER_BFD_TAIL = """
</peerBfd>
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</filter>
"""
# merge peer bfd
CE_MERGE_PEER_BFD_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer>
<peerAddr>%s</peerAddr>
<peerBfd operation="merge">
"""
CE_MERGE_PEER_BFD_TAIL = """
</peerBfd>
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# delete peer bfd
CE_DELETE_PEER_BFD_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpPeers>
<bgpPeer>
<peerAddr>%s</peerAddr>
<peerBfd operation="delete">
"""
CE_DELETE_PEER_BFD_TAIL = """
</peerBfd>
</bgpPeer>
</bgpPeers>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
class BgpNeighbor(object):
""" Manages BGP peer configuration """
def netconf_get_config(self, **kwargs):
""" netconf_get_config """
module = kwargs["module"]
conf_str = kwargs["conf_str"]
xml_str = get_nc_config(module, conf_str)
return xml_str
def netconf_set_config(self, **kwargs):
""" netconf_set_config """
module = kwargs["module"]
conf_str = kwargs["conf_str"]
xml_str = set_nc_config(module, conf_str)
return xml_str
def check_bgp_peer_args(self, **kwargs):
""" check_bgp_peer_args """
module = kwargs["module"]
result = dict()
need_cfg = False
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
peer_addr = module.params['peer_addr']
if peer_addr:
if not check_ip_addr(ipaddr=peer_addr):
module.fail_json(
msg='Error: The peer_addr %s is invalid.' % peer_addr)
need_cfg = True
remote_as = module.params['remote_as']
if remote_as:
if len(remote_as) > 11 or len(remote_as) < 1:
module.fail_json(
msg='Error: The len of remote_as %s is out of [1 - 11].' % remote_as)
need_cfg = True
result["need_cfg"] = need_cfg
return result
def check_bgp_peer_other_args(self, **kwargs):
""" check_bgp_peer_other_args """
module = kwargs["module"]
result = dict()
need_cfg = False
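        # For each supplied parameter, read the current value from the
        # device over NETCONF and set need_cfg when it differs from the
        # desired value (or is absent).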
peerip = module.params['peer_addr']
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
description = module.params['description']
if description:
if len(description) > 80 or len(description) < 1:
module.fail_json(
msg='Error: The len of description %s is out of [1 - 80].' % description)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<description></description>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<description>(.*)</description>.*', recv_xml)
if re_find:
result["description"] = re_find
if re_find[0] != description:
need_cfg = True
else:
need_cfg = True
fake_as = module.params['fake_as']
if fake_as:
if len(fake_as) > 11 or len(fake_as) < 1:
module.fail_json(
msg='Error: The len of fake_as %s is out of [1 - 11].' % fake_as)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<fakeAs></fakeAs>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<fakeAs>(.*)</fakeAs>.*', recv_xml)
if re_find:
result["fake_as"] = re_find
if re_find[0] != fake_as:
need_cfg = True
else:
need_cfg = True
dual_as = module.params['dual_as']
if dual_as != 'no_use':
if not fake_as:
module.fail_json(msg='fake_as must exist.')
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<dualAs></dualAs>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<dualAs>(.*)</dualAs>.*', recv_xml)
if re_find:
result["dual_as"] = re_find
if re_find[0] != dual_as:
need_cfg = True
else:
need_cfg = True
conventional = module.params['conventional']
if conventional != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<conventional></conventional>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<conventional>(.*)</conventional>.*', recv_xml)
if re_find:
result["conventional"] = re_find
if re_find[0] != conventional:
need_cfg = True
else:
need_cfg = True
route_refresh = module.params['route_refresh']
if route_refresh != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<routeRefresh></routeRefresh>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<routeRefresh>(.*)</routeRefresh>.*', recv_xml)
if re_find:
result["route_refresh"] = re_find
if re_find[0] != route_refresh:
need_cfg = True
else:
need_cfg = True
four_byte_as = module.params['four_byte_as']
if four_byte_as != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<fourByteAs></fourByteAs>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<fourByteAs>(.*)</fourByteAs>.*', recv_xml)
if re_find:
result["four_byte_as"] = re_find
if re_find[0] != four_byte_as:
need_cfg = True
else:
need_cfg = True
is_ignore = module.params['is_ignore']
if is_ignore != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<isIgnore></isIgnore>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isIgnore>(.*)</isIgnore>.*', recv_xml)
if re_find:
result["is_ignore"] = re_find
if re_find[0] != is_ignore:
need_cfg = True
else:
need_cfg = True
local_if_name = module.params['local_if_name']
if local_if_name:
if len(local_if_name) > 63 or len(local_if_name) < 1:
module.fail_json(
msg='Error: The len of local_if_name %s is out of [1 - 63].' % local_if_name)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<localIfName></localIfName>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<localIfName>(.*)</localIfName>.*', recv_xml)
if re_find:
result["local_if_name"] = re_find
if re_find[0].lower() != local_if_name.lower():
need_cfg = True
else:
need_cfg = True
ebgp_max_hop = module.params['ebgp_max_hop']
if ebgp_max_hop:
if int(ebgp_max_hop) > 255 or int(ebgp_max_hop) < 1:
module.fail_json(
msg='Error: The value of ebgp_max_hop %s is out of [1 - 255].' % ebgp_max_hop)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<ebgpMaxHop></ebgpMaxHop>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<ebgpMaxHop>(.*)</ebgpMaxHop>.*', recv_xml)
if re_find:
result["ebgp_max_hop"] = re_find
if re_find[0] != ebgp_max_hop:
need_cfg = True
else:
need_cfg = True
valid_ttl_hops = module.params['valid_ttl_hops']
if valid_ttl_hops:
if int(valid_ttl_hops) > 255 or int(valid_ttl_hops) < 1:
module.fail_json(
msg='Error: The value of valid_ttl_hops %s is out of [1 - 255].' % valid_ttl_hops)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<validTtlHops></validTtlHops>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<validTtlHops>(.*)</validTtlHops>.*', recv_xml)
if re_find:
result["valid_ttl_hops"] = re_find
if re_find[0] != valid_ttl_hops:
need_cfg = True
else:
need_cfg = True
connect_mode = module.params['connect_mode']
if connect_mode:
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<connectMode></connectMode>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<connectMode>(.*)</connectMode>.*', recv_xml)
if re_find:
result["connect_mode"] = re_find
if re_find[0] != connect_mode:
need_cfg = True
else:
need_cfg = True
is_log_change = module.params['is_log_change']
if is_log_change != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<isLogChange></isLogChange>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isLogChange>(.*)</isLogChange>.*', recv_xml)
if re_find:
result["is_log_change"] = re_find
if re_find[0] != is_log_change:
need_cfg = True
else:
need_cfg = True
pswd_type = module.params['pswd_type']
if pswd_type:
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<pswdType></pswdType>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<pswdType>(.*)</pswdType>.*', recv_xml)
if re_find:
result["pswd_type"] = re_find
if re_find[0] != pswd_type:
need_cfg = True
else:
need_cfg = True
pswd_cipher_text = module.params['pswd_cipher_text']
if pswd_cipher_text:
if len(pswd_cipher_text) > 255 or len(pswd_cipher_text) < 1:
module.fail_json(
msg='Error: The len of pswd_cipher_text %s is out of [1 - 255].' % pswd_cipher_text)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<pswdCipherText></pswdCipherText>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<pswdCipherText>(.*)</pswdCipherText>.*', recv_xml)
if re_find:
result["pswd_cipher_text"] = re_find
if re_find[0] != pswd_cipher_text:
need_cfg = True
else:
need_cfg = True
keep_alive_time = module.params['keep_alive_time']
if keep_alive_time:
            if int(keep_alive_time) > 21845 or int(keep_alive_time) < 0:
                module.fail_json(
                    msg='Error: The value of keep_alive_time %s is out of [0 - 21845].' % keep_alive_time)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<keepAliveTime></keepAliveTime>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<keepAliveTime>(.*)</keepAliveTime>.*', recv_xml)
if re_find:
result["keep_alive_time"] = re_find
if re_find[0] != keep_alive_time:
need_cfg = True
else:
need_cfg = True
hold_time = module.params['hold_time']
if hold_time:
if int(hold_time) != 0 and (int(hold_time) > 65535 or int(hold_time) < 3):
module.fail_json(
msg='Error: The value of hold_time %s is out of [0 or 3 - 65535].' % hold_time)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<holdTime></holdTime>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<holdTime>(.*)</holdTime>.*', recv_xml)
if re_find:
result["hold_time"] = re_find
if re_find[0] != hold_time:
need_cfg = True
else:
need_cfg = True
min_hold_time = module.params['min_hold_time']
if min_hold_time:
if int(min_hold_time) != 0 and (int(min_hold_time) > 65535 or int(min_hold_time) < 20):
module.fail_json(
msg='Error: The value of min_hold_time %s is out of [0 or 20 - 65535].' % min_hold_time)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<minHoldTime></minHoldTime>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<minHoldTime>(.*)</minHoldTime>.*', recv_xml)
if re_find:
result["min_hold_time"] = re_find
if re_find[0] != min_hold_time:
need_cfg = True
else:
need_cfg = True
key_chain_name = module.params['key_chain_name']
if key_chain_name:
if len(key_chain_name) > 47 or len(key_chain_name) < 1:
module.fail_json(
msg='Error: The len of key_chain_name %s is out of [1 - 47].' % key_chain_name)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<keyChainName></keyChainName>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<keyChainName>(.*)</keyChainName>.*', recv_xml)
if re_find:
result["key_chain_name"] = re_find
if re_find[0] != key_chain_name:
need_cfg = True
else:
need_cfg = True
conn_retry_time = module.params['conn_retry_time']
if conn_retry_time:
if int(conn_retry_time) > 65535 or int(conn_retry_time) < 1:
module.fail_json(
msg='Error: The value of conn_retry_time %s is out of [1 - 65535].' % conn_retry_time)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<connRetryTime></connRetryTime>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<connRetryTime>(.*)</connRetryTime>.*', recv_xml)
if re_find:
result["conn_retry_time"] = re_find
if re_find[0] != conn_retry_time:
need_cfg = True
else:
need_cfg = True
tcp_mss = module.params['tcp_MSS']
if tcp_mss:
if int(tcp_mss) > 4096 or int(tcp_mss) < 176:
module.fail_json(
msg='Error: The value of tcp_mss %s is out of [176 - 4096].' % tcp_mss)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<tcpMSS></tcpMSS>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<tcpMSS>(.*)</tcpMSS>.*', recv_xml)
if re_find:
result["tcp_MSS"] = re_find
if re_find[0] != tcp_mss:
need_cfg = True
else:
need_cfg = True
mpls_local_ifnet_disable = module.params['mpls_local_ifnet_disable']
if mpls_local_ifnet_disable != 'no_use':
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<mplsLocalIfnetDisable></mplsLocalIfnetDisable>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<mplsLocalIfnetDisable>(.*)</mplsLocalIfnetDisable>.*', recv_xml)
if re_find:
result["mpls_local_ifnet_disable"] = re_find
if re_find[0] != mpls_local_ifnet_disable:
need_cfg = True
else:
need_cfg = True
prepend_global_as = module.params['prepend_global_as']
if prepend_global_as != 'no_use':
if not fake_as:
module.fail_json(msg='fake_as must exist.')
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<prependGlobalAs></prependGlobalAs>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<prependGlobalAs>(.*)</prependGlobalAs>.*', recv_xml)
if re_find:
result["prepend_global_as"] = re_find
if re_find[0] != prepend_global_as:
need_cfg = True
else:
need_cfg = True
prepend_fake_as = module.params['prepend_fake_as']
if prepend_fake_as != 'no_use':
if not fake_as:
module.fail_json(msg='fake_as must exist.')
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<prependFakeAs></prependFakeAs>" + CE_GET_BGP_PEER_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<prependFakeAs>(.*)</prependFakeAs>.*', recv_xml)
if re_find:
result["prepend_fake_as"] = re_find
if re_find[0] != prepend_fake_as:
need_cfg = True
else:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def check_peer_bfd_merge_args(self, **kwargs):
""" check_peer_bfd_merge_args """
module = kwargs["module"]
result = dict()
need_cfg = False
state = module.params['state']
if state == "absent":
result["need_cfg"] = need_cfg
return result
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
peer_addr = module.params['peer_addr']
is_bfd_block = module.params['is_bfd_block']
if is_bfd_block != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isBfdBlock></isBfdBlock>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isBfdBlock>(.*)</isBfdBlock>.*', recv_xml)
if re_find:
result["is_bfd_block"] = re_find
if re_find[0] != is_bfd_block:
need_cfg = True
else:
need_cfg = True
multiplier = module.params['multiplier']
if multiplier:
if int(multiplier) > 50 or int(multiplier) < 3:
module.fail_json(
msg='Error: The value of multiplier %s is out of [3 - 50].' % multiplier)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<multiplier></multiplier>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<multiplier>(.*)</multiplier>.*', recv_xml)
if re_find:
result["multiplier"] = re_find
if re_find[0] != multiplier:
need_cfg = True
else:
need_cfg = True
is_bfd_enable = module.params['is_bfd_enable']
if is_bfd_enable != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isBfdEnable></isBfdEnable>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isBfdEnable>(.*)</isBfdEnable>.*', recv_xml)
if re_find:
result["is_bfd_enable"] = re_find
if re_find[0] != is_bfd_enable:
need_cfg = True
else:
need_cfg = True
rx_interval = module.params['rx_interval']
if rx_interval:
if int(rx_interval) > 1000 or int(rx_interval) < 50:
module.fail_json(
msg='Error: The value of rx_interval %s is out of [50 - 1000].' % rx_interval)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<rxInterval></rxInterval>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<rxInterval>(.*)</rxInterval>.*', recv_xml)
if re_find:
result["rx_interval"] = re_find
if re_find[0] != rx_interval:
need_cfg = True
else:
need_cfg = True
tx_interval = module.params['tx_interval']
if tx_interval:
if int(tx_interval) > 1000 or int(tx_interval) < 50:
module.fail_json(
msg='Error: The value of tx_interval %s is out of [50 - 1000].' % tx_interval)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<txInterval></txInterval>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<txInterval>(.*)</txInterval>.*', recv_xml)
if re_find:
result["tx_interval"] = re_find
if re_find[0] != tx_interval:
need_cfg = True
else:
need_cfg = True
is_single_hop = module.params['is_single_hop']
if is_single_hop != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isSingleHop></isSingleHop>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isSingleHop>(.*)</isSingleHop>.*', recv_xml)
if re_find:
result["is_single_hop"] = re_find
if re_find[0] != is_single_hop:
need_cfg = True
else:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def check_peer_bfd_delete_args(self, **kwargs):
""" check_peer_bfd_delete_args """
module = kwargs["module"]
result = dict()
need_cfg = False
state = module.params['state']
if state == "present":
result["need_cfg"] = need_cfg
return result
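        # In the delete flow, configuration work is needed only when the
        # value currently on the device matches the value being removed.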
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
peer_addr = module.params['peer_addr']
is_bfd_block = module.params['is_bfd_block']
if is_bfd_block != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isBfdBlock></isBfdBlock>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<isBfdBlock>(.*)</isBfdBlock>.*', recv_xml)
if re_find:
result["is_bfd_block"] = re_find
if re_find[0] == is_bfd_block:
need_cfg = True
multiplier = module.params['multiplier']
if multiplier:
if int(multiplier) > 50 or int(multiplier) < 3:
module.fail_json(
msg='Error: The value of multiplier %s is out of [3 - 50].' % multiplier)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<multiplier></multiplier>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<multiplier>(.*)</multiplier>.*', recv_xml)
if re_find:
result["multiplier"] = re_find
if re_find[0] == multiplier:
need_cfg = True
is_bfd_enable = module.params['is_bfd_enable']
if is_bfd_enable != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isBfdEnable></isBfdEnable>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<isBfdEnable>(.*)</isBfdEnable>.*', recv_xml)
if re_find:
result["is_bfd_enable"] = re_find
if re_find[0] == is_bfd_enable:
need_cfg = True
rx_interval = module.params['rx_interval']
if rx_interval:
if int(rx_interval) > 1000 or int(rx_interval) < 50:
module.fail_json(
msg='Error: The value of rx_interval %s is out of [50 - 1000].' % rx_interval)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<rxInterval></rxInterval>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<rxInterval>(.*)</rxInterval>.*', recv_xml)
if re_find:
result["rx_interval"] = re_find
if re_find[0] == rx_interval:
need_cfg = True
tx_interval = module.params['tx_interval']
if tx_interval:
if int(tx_interval) > 1000 or int(tx_interval) < 50:
module.fail_json(
msg='Error: The value of tx_interval %s is out of [50 - 1000].' % tx_interval)
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<txInterval></txInterval>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<txInterval>(.*)</txInterval>.*', recv_xml)
if re_find:
result["tx_interval"] = re_find
if re_find[0] == tx_interval:
need_cfg = True
is_single_hop = module.params['is_single_hop']
if is_single_hop != 'no_use':
conf_str = CE_GET_PEER_BFD_HEADER % (
vrf_name, peer_addr) + "<isSingleHop></isSingleHop>" + CE_GET_PEER_BFD_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
re_find = re.findall(
r'.*<isSingleHop>(.*)</isSingleHop>.*', recv_xml)
if re_find:
result["is_single_hop"] = re_find
if re_find[0] == is_single_hop:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def get_bgp_peer(self, **kwargs):
""" get_bgp_peer """
module = kwargs["module"]
peerip = module.params['peer_addr']
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + \
"<remoteAs></remoteAs>" + CE_GET_BGP_PEER_TAIL
xml_str = self.netconf_get_config(module=module, conf_str=conf_str)
result = list()
if "<data/>" in xml_str:
return result
else:
re_find = re.findall(
r'.*<peerAddr>(.*)</peerAddr>.*\s.*<remoteAs>(.*)</remoteAs>.*', xml_str)
if re_find:
return re_find
else:
return result
def get_bgp_del_peer(self, **kwargs):
""" get_bgp_del_peer """
module = kwargs["module"]
peerip = module.params['peer_addr']
vrf_name = module.params['vrf_name']
if vrf_name:
if len(vrf_name) > 31 or len(vrf_name) == 0:
module.fail_json(
msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)
conf_str = CE_GET_BGP_PEER_HEADER % (vrf_name, peerip) + CE_GET_BGP_PEER_TAIL
xml_str = self.netconf_get_config(module=module, conf_str=conf_str)
result = list()
if "<data/>" in xml_str:
return result
else:
re_find = re.findall(
r'.*<peerAddr>(.*)</peerAddr>.*', xml_str)
if re_find:
return re_find
else:
return result
def merge_bgp_peer(self, **kwargs):
""" merge_bgp_peer """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
remote_as = module.params['remote_as']
conf_str = CE_MERGE_BGP_PEER_HEADER % (
vrf_name, peer_addr) + "<remoteAs>%s</remoteAs>" % remote_as + CE_MERGE_BGP_PEER_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Merge bgp peer failed.')
cmds = []
cmd = "peer %s as-number %s" % (peer_addr, remote_as)
cmds.append(cmd)
return cmds
def create_bgp_peer(self, **kwargs):
""" create_bgp_peer """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
remote_as = module.params['remote_as']
conf_str = CE_CREATE_BGP_PEER_HEADER % (
vrf_name, peer_addr) + "<remoteAs>%s</remoteAs>" % remote_as + CE_CREATE_BGP_PEER_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Create bgp peer failed.')
cmds = []
cmd = "peer %s as-number %s" % (peer_addr, remote_as)
cmds.append(cmd)
return cmds
def delete_bgp_peer(self, **kwargs):
""" delete_bgp_peer """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
conf_str = CE_DELETE_BGP_PEER_HEADER % (
vrf_name, peer_addr) + CE_DELETE_BGP_PEER_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Delete bgp peer failed.')
cmds = []
cmd = "undo peer %s" % peer_addr
cmds.append(cmd)
return cmds
def merge_bgp_peer_other(self, **kwargs):
""" merge_bgp_peer """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
conf_str = CE_MERGE_BGP_PEER_HEADER % (vrf_name, peer_addr)
cmds = []
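        # Accumulate every supplied option into one NETCONF merge payload
        # while recording the equivalent CLI command for the module's
        # 'updates' output.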
description = module.params['description']
if description:
conf_str += "<description>%s</description>" % description
cmd = "peer %s description %s" % (peer_addr, description)
cmds.append(cmd)
fake_as = module.params['fake_as']
if fake_as:
conf_str += "<fakeAs>%s</fakeAs>" % fake_as
cmd = "peer %s local-as %s" % (peer_addr, fake_as)
cmds.append(cmd)
dual_as = module.params['dual_as']
if dual_as != 'no_use':
conf_str += "<dualAs>%s</dualAs>" % dual_as
if dual_as == "true":
cmd = "peer %s local-as %s dual-as" % (peer_addr, fake_as)
else:
cmd = "peer %s local-as %s" % (peer_addr, fake_as)
cmds.append(cmd)
conventional = module.params['conventional']
if conventional != 'no_use':
conf_str += "<conventional>%s</conventional>" % conventional
if conventional == "true":
cmd = "peer %s capability-advertise conventional" % peer_addr
else:
cmd = "undo peer %s capability-advertise conventional" % peer_addr
cmds.append(cmd)
route_refresh = module.params['route_refresh']
if route_refresh != 'no_use':
conf_str += "<routeRefresh>%s</routeRefresh>" % route_refresh
if route_refresh == "true":
cmd = "peer %s capability-advertise route-refresh" % peer_addr
else:
cmd = "undo peer %s capability-advertise route-refresh" % peer_addr
cmds.append(cmd)
four_byte_as = module.params['four_byte_as']
if four_byte_as != 'no_use':
conf_str += "<fourByteAs>%s</fourByteAs>" % four_byte_as
if four_byte_as == "true":
cmd = "peer %s capability-advertise 4-byte-as" % peer_addr
else:
cmd = "undo peer %s capability-advertise 4-byte-as" % peer_addr
cmds.append(cmd)
is_ignore = module.params['is_ignore']
if is_ignore != 'no_use':
conf_str += "<isIgnore>%s</isIgnore>" % is_ignore
if is_ignore == "true":
cmd = "peer %s ignore" % peer_addr
else:
cmd = "undo peer %s ignore" % peer_addr
cmds.append(cmd)
local_if_name = module.params['local_if_name']
if local_if_name:
conf_str += "<localIfName>%s</localIfName>" % local_if_name
cmd = "peer %s connect-interface %s" % (peer_addr, local_if_name)
cmds.append(cmd)
ebgp_max_hop = module.params['ebgp_max_hop']
if ebgp_max_hop:
conf_str += "<ebgpMaxHop>%s</ebgpMaxHop>" % ebgp_max_hop
cmd = "peer %s ebgp-max-hop %s" % (peer_addr, ebgp_max_hop)
cmds.append(cmd)
valid_ttl_hops = module.params['valid_ttl_hops']
if valid_ttl_hops:
conf_str += "<validTtlHops>%s</validTtlHops>" % valid_ttl_hops
cmd = "peer %s valid-ttl-hops %s" % (peer_addr, valid_ttl_hops)
cmds.append(cmd)
connect_mode = module.params['connect_mode']
if connect_mode:
if connect_mode == "listenOnly":
cmd = "peer %s listen-only" % peer_addr
cmds.append(cmd)
elif connect_mode == "connectOnly":
cmd = "peer %s connect-only" % peer_addr
cmds.append(cmd)
elif connect_mode == "both":
connect_mode = "null"
cmd = "peer %s listen-only" % peer_addr
cmds.append(cmd)
cmd = "peer %s connect-only" % peer_addr
cmds.append(cmd)
conf_str += "<connectMode>%s</connectMode>" % connect_mode
is_log_change = module.params['is_log_change']
if is_log_change != 'no_use':
conf_str += "<isLogChange>%s</isLogChange>" % is_log_change
if is_log_change == "true":
cmd = "peer %s log-change" % peer_addr
else:
cmd = "undo peer %s log-change" % peer_addr
cmds.append(cmd)
pswd_type = module.params['pswd_type']
if pswd_type:
conf_str += "<pswdType>%s</pswdType>" % pswd_type
pswd_cipher_text = module.params['pswd_cipher_text']
if pswd_cipher_text:
conf_str += "<pswdCipherText>%s</pswdCipherText>" % pswd_cipher_text
if pswd_type == "cipher":
cmd = "peer %s password cipher %s" % (
peer_addr, pswd_cipher_text)
elif pswd_type == "simple":
cmd = "peer %s password simple %s" % (
peer_addr, pswd_cipher_text)
cmds.append(cmd)
keep_alive_time = module.params['keep_alive_time']
if keep_alive_time:
conf_str += "<keepAliveTime>%s</keepAliveTime>" % keep_alive_time
cmd = "peer %s timer keepalive %s" % (peer_addr, keep_alive_time)
cmds.append(cmd)
hold_time = module.params['hold_time']
if hold_time:
conf_str += "<holdTime>%s</holdTime>" % hold_time
cmd = "peer %s timer hold %s" % (peer_addr, hold_time)
cmds.append(cmd)
min_hold_time = module.params['min_hold_time']
if min_hold_time:
conf_str += "<minHoldTime>%s</minHoldTime>" % min_hold_time
cmd = "peer %s timer min-holdtime %s" % (peer_addr, min_hold_time)
cmds.append(cmd)
key_chain_name = module.params['key_chain_name']
if key_chain_name:
conf_str += "<keyChainName>%s</keyChainName>" % key_chain_name
cmd = "peer %s keychain %s" % (peer_addr, key_chain_name)
cmds.append(cmd)
conn_retry_time = module.params['conn_retry_time']
if conn_retry_time:
conf_str += "<connRetryTime>%s</connRetryTime>" % conn_retry_time
cmd = "peer %s timer connect-retry %s" % (
peer_addr, conn_retry_time)
cmds.append(cmd)
tcp_mss = module.params['tcp_MSS']
if tcp_mss:
conf_str += "<tcpMSS>%s</tcpMSS>" % tcp_mss
cmd = "peer %s tcp-mss %s" % (peer_addr, tcp_mss)
cmds.append(cmd)
mpls_local_ifnet_disable = module.params['mpls_local_ifnet_disable']
if mpls_local_ifnet_disable != 'no_use':
conf_str += "<mplsLocalIfnetDisable>%s</mplsLocalIfnetDisable>" % mpls_local_ifnet_disable
if mpls_local_ifnet_disable == "false":
cmd = "undo peer %s mpls-local-ifnet disable" % peer_addr
else:
cmd = "peer %s mpls-local-ifnet disable" % peer_addr
cmds.append(cmd)
prepend_global_as = module.params['prepend_global_as']
if prepend_global_as != 'no_use':
conf_str += "<prependGlobalAs>%s</prependGlobalAs>" % prepend_global_as
if prepend_global_as == "true":
cmd = "peer %s local-as %s prepend-global-as" % (peer_addr, fake_as)
else:
cmd = "undo peer %s local-as %s prepend-global-as" % (peer_addr, fake_as)
cmds.append(cmd)
prepend_fake_as = module.params['prepend_fake_as']
if prepend_fake_as != 'no_use':
conf_str += "<prependFakeAs>%s</prependFakeAs>" % prepend_fake_as
if prepend_fake_as == "true":
cmd = "peer %s local-as %s prepend-local-as" % (peer_addr, fake_as)
else:
cmd = "undo peer %s local-as %s prepend-local-as" % (peer_addr, fake_as)
cmds.append(cmd)
conf_str += CE_MERGE_BGP_PEER_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Merge bgp peer other failed.')
return cmds
def merge_peer_bfd(self, **kwargs):
""" merge_peer_bfd """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
conf_str = CE_MERGE_PEER_BFD_HEADER % (vrf_name, peer_addr)
cmds = []
is_bfd_block = module.params['is_bfd_block']
if is_bfd_block != 'no_use':
conf_str += "<isBfdBlock>%s</isBfdBlock>" % is_bfd_block
if is_bfd_block == "true":
cmd = "peer %s bfd block" % peer_addr
else:
cmd = "undo peer %s bfd block" % peer_addr
cmds.append(cmd)
multiplier = module.params['multiplier']
if multiplier:
conf_str += "<multiplier>%s</multiplier>" % multiplier
cmd = "peer %s bfd detect-multiplier %s" % (peer_addr, multiplier)
cmds.append(cmd)
is_bfd_enable = module.params['is_bfd_enable']
if is_bfd_enable != 'no_use':
conf_str += "<isBfdEnable>%s</isBfdEnable>" % is_bfd_enable
if is_bfd_enable == "true":
cmd = "peer %s bfd enable" % peer_addr
else:
cmd = "undo peer %s bfd enable" % peer_addr
cmds.append(cmd)
rx_interval = module.params['rx_interval']
if rx_interval:
conf_str += "<rxInterval>%s</rxInterval>" % rx_interval
cmd = "peer %s bfd min-rx-interval %s" % (peer_addr, rx_interval)
cmds.append(cmd)
tx_interval = module.params['tx_interval']
if tx_interval:
conf_str += "<txInterval>%s</txInterval>" % tx_interval
cmd = "peer %s bfd min-tx-interval %s" % (peer_addr, tx_interval)
cmds.append(cmd)
is_single_hop = module.params['is_single_hop']
if is_single_hop != 'no_use':
conf_str += "<isSingleHop>%s</isSingleHop>" % is_single_hop
if is_single_hop == "true":
cmd = "peer %s bfd enable single-hop-prefer" % peer_addr
else:
cmd = "undo peer %s bfd enable single-hop-prefer" % peer_addr
cmds.append(cmd)
conf_str += CE_MERGE_PEER_BFD_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Merge peer bfd failed.')
return cmds
def delete_peer_bfd(self, **kwargs):
""" delete_peer_bfd """
module = kwargs["module"]
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
conf_str = CE_DELETE_PEER_BFD_HEADER % (vrf_name, peer_addr)
cmds = []
is_bfd_block = module.params['is_bfd_block']
if is_bfd_block != 'no_use':
conf_str += "<isBfdBlock>%s</isBfdBlock>" % is_bfd_block
cmd = "undo peer %s bfd block" % peer_addr
cmds.append(cmd)
multiplier = module.params['multiplier']
if multiplier:
conf_str += "<multiplier>%s</multiplier>" % multiplier
cmd = "undo peer %s bfd detect-multiplier %s" % (
peer_addr, multiplier)
cmds.append(cmd)
is_bfd_enable = module.params['is_bfd_enable']
if is_bfd_enable != 'no_use':
conf_str += "<isBfdEnable>%s</isBfdEnable>" % is_bfd_enable
cmd = "undo peer %s bfd enable" % peer_addr
cmds.append(cmd)
rx_interval = module.params['rx_interval']
if rx_interval:
conf_str += "<rxInterval>%s</rxInterval>" % rx_interval
cmd = "undo peer %s bfd min-rx-interval %s" % (
peer_addr, rx_interval)
cmds.append(cmd)
tx_interval = module.params['tx_interval']
if tx_interval:
conf_str += "<txInterval>%s</txInterval>" % tx_interval
cmd = "undo peer %s bfd min-tx-interval %s" % (
peer_addr, tx_interval)
cmds.append(cmd)
is_single_hop = module.params['is_single_hop']
if is_single_hop != 'no_use':
conf_str += "<isSingleHop>%s</isSingleHop>" % is_single_hop
cmd = "undo peer %s bfd enable single-hop-prefer" % peer_addr
cmds.append(cmd)
conf_str += CE_DELETE_PEER_BFD_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Delete peer bfd failed.')
return cmds
def main():
""" main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
vrf_name=dict(type='str', required=True),
peer_addr=dict(type='str', required=True),
remote_as=dict(type='str', required=True),
description=dict(type='str'),
fake_as=dict(type='str'),
dual_as=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
conventional=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
route_refresh=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
four_byte_as=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
is_ignore=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
local_if_name=dict(type='str'),
ebgp_max_hop=dict(type='str'),
valid_ttl_hops=dict(type='str'),
connect_mode=dict(choices=['listenOnly', 'connectOnly', 'both']),
is_log_change=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
pswd_type=dict(choices=['null', 'cipher', 'simple']),
pswd_cipher_text=dict(type='str', no_log=True),
keep_alive_time=dict(type='str'),
hold_time=dict(type='str'),
min_hold_time=dict(type='str'),
key_chain_name=dict(type='str'),
conn_retry_time=dict(type='str'),
tcp_MSS=dict(type='str'),
mpls_local_ifnet_disable=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
prepend_global_as=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
prepend_fake_as=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
is_bfd_block=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
multiplier=dict(type='str'),
is_bfd_enable=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
rx_interval=dict(type='str'),
tx_interval=dict(type='str'),
is_single_hop=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']))
argument_spec.update(ce_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
changed = False
proposed = dict()
existing = dict()
end_state = dict()
updates = []
state = module.params['state']
vrf_name = module.params['vrf_name']
peer_addr = module.params['peer_addr']
remote_as = module.params['remote_as']
description = module.params['description']
fake_as = module.params['fake_as']
dual_as = module.params['dual_as']
conventional = module.params['conventional']
route_refresh = module.params['route_refresh']
four_byte_as = module.params['four_byte_as']
is_ignore = module.params['is_ignore']
local_if_name = module.params['local_if_name']
ebgp_max_hop = module.params['ebgp_max_hop']
valid_ttl_hops = module.params['valid_ttl_hops']
connect_mode = module.params['connect_mode']
is_log_change = module.params['is_log_change']
pswd_type = module.params['pswd_type']
pswd_cipher_text = module.params['pswd_cipher_text']
keep_alive_time = module.params['keep_alive_time']
hold_time = module.params['hold_time']
min_hold_time = module.params['min_hold_time']
key_chain_name = module.params['key_chain_name']
conn_retry_time = module.params['conn_retry_time']
tcp_mss = module.params['tcp_MSS']
mpls_local_ifnet_disable = module.params['mpls_local_ifnet_disable']
prepend_global_as = module.params['prepend_global_as']
prepend_fake_as = module.params['prepend_fake_as']
is_bfd_block = module.params['is_bfd_block']
multiplier = module.params['multiplier']
is_bfd_enable = module.params['is_bfd_enable']
rx_interval = module.params['rx_interval']
tx_interval = module.params['tx_interval']
is_single_hop = module.params['is_single_hop']
ce_bgp_peer_obj = BgpNeighbor()
# get proposed
proposed["state"] = state
if vrf_name:
proposed["vrf_name"] = vrf_name
if peer_addr:
proposed["peer_addr"] = peer_addr
if remote_as:
proposed["remote_as"] = remote_as
if description:
proposed["description"] = description
if fake_as:
proposed["fake_as"] = fake_as
if dual_as != 'no_use':
proposed["dual_as"] = dual_as
if conventional != 'no_use':
proposed["conventional"] = conventional
if route_refresh != 'no_use':
proposed["route_refresh"] = route_refresh
if four_byte_as != 'no_use':
proposed["four_byte_as"] = four_byte_as
if is_ignore != 'no_use':
proposed["is_ignore"] = is_ignore
if local_if_name:
proposed["local_if_name"] = local_if_name
if ebgp_max_hop:
proposed["ebgp_max_hop"] = ebgp_max_hop
if valid_ttl_hops:
proposed["valid_ttl_hops"] = valid_ttl_hops
if connect_mode:
proposed["connect_mode"] = connect_mode
if is_log_change != 'no_use':
proposed["is_log_change"] = is_log_change
if pswd_type:
proposed["pswd_type"] = pswd_type
if pswd_cipher_text:
proposed["pswd_cipher_text"] = pswd_cipher_text
if keep_alive_time:
proposed["keep_alive_time"] = keep_alive_time
if hold_time:
proposed["hold_time"] = hold_time
if min_hold_time:
proposed["min_hold_time"] = min_hold_time
if key_chain_name:
proposed["key_chain_name"] = key_chain_name
if conn_retry_time:
proposed["conn_retry_time"] = conn_retry_time
if tcp_mss:
proposed["tcp_MSS"] = tcp_mss
if mpls_local_ifnet_disable != 'no_use':
proposed["mpls_local_ifnet_disable"] = mpls_local_ifnet_disable
if prepend_global_as != 'no_use':
proposed["prepend_global_as"] = prepend_global_as
if prepend_fake_as != 'no_use':
proposed["prepend_fake_as"] = prepend_fake_as
if is_bfd_block != 'no_use':
proposed["is_bfd_block"] = is_bfd_block
if multiplier:
proposed["multiplier"] = multiplier
if is_bfd_enable != 'no_use':
proposed["is_bfd_enable"] = is_bfd_enable
if rx_interval:
proposed["rx_interval"] = rx_interval
if tx_interval:
proposed["tx_interval"] = tx_interval
if is_single_hop != 'no_use':
proposed["is_single_hop"] = is_single_hop
if not ce_bgp_peer_obj:
module.fail_json(msg='Error: Init module failed.')
need_bgp_peer_enable = ce_bgp_peer_obj.check_bgp_peer_args(module=module)
need_bgp_peer_other_rst = ce_bgp_peer_obj.check_bgp_peer_other_args(
module=module)
need_peer_bfd_merge_rst = ce_bgp_peer_obj.check_peer_bfd_merge_args(
module=module)
need_peer_bfd_del_rst = ce_bgp_peer_obj.check_peer_bfd_delete_args(
module=module)
# bgp peer config
if need_bgp_peer_enable["need_cfg"]:
if state == "present":
if remote_as:
bgp_peer_exist = ce_bgp_peer_obj.get_bgp_peer(module=module)
existing["bgp peer"] = bgp_peer_exist
bgp_peer_new = (peer_addr, remote_as)
if len(bgp_peer_exist) == 0:
cmd = ce_bgp_peer_obj.create_bgp_peer(module=module)
changed = True
for item in cmd:
updates.append(item)
elif bgp_peer_new in bgp_peer_exist:
pass
else:
cmd = ce_bgp_peer_obj.merge_bgp_peer(module=module)
changed = True
for item in cmd:
updates.append(item)
bgp_peer_end = ce_bgp_peer_obj.get_bgp_peer(module=module)
end_state["bgp peer"] = bgp_peer_end
else:
bgp_peer_exist = ce_bgp_peer_obj.get_bgp_del_peer(module=module)
existing["bgp peer"] = bgp_peer_exist
            bgp_peer_new = peer_addr
if len(bgp_peer_exist) == 0:
pass
elif bgp_peer_new in bgp_peer_exist:
cmd = ce_bgp_peer_obj.delete_bgp_peer(module=module)
changed = True
for item in cmd:
updates.append(item)
bgp_peer_end = ce_bgp_peer_obj.get_bgp_del_peer(module=module)
end_state["bgp peer"] = bgp_peer_end
# bgp peer other args
exist_tmp = dict()
for item in need_bgp_peer_other_rst:
if item != "need_cfg":
exist_tmp[item] = need_bgp_peer_other_rst[item]
if exist_tmp:
existing["bgp peer other"] = exist_tmp
if need_bgp_peer_other_rst["need_cfg"]:
if state == "present":
cmd = ce_bgp_peer_obj.merge_bgp_peer_other(module=module)
changed = True
for item in cmd:
updates.append(item)
need_bgp_peer_other_rst = ce_bgp_peer_obj.check_bgp_peer_other_args(
module=module)
end_tmp = dict()
for item in need_bgp_peer_other_rst:
if item != "need_cfg":
end_tmp[item] = need_bgp_peer_other_rst[item]
if end_tmp:
end_state["bgp peer other"] = end_tmp
# peer bfd args
if state == "present":
exist_tmp = dict()
for item in need_peer_bfd_merge_rst:
if item != "need_cfg":
exist_tmp[item] = need_peer_bfd_merge_rst[item]
if exist_tmp:
existing["peer bfd"] = exist_tmp
if need_peer_bfd_merge_rst["need_cfg"]:
cmd = ce_bgp_peer_obj.merge_peer_bfd(module=module)
changed = True
for item in cmd:
updates.append(item)
need_peer_bfd_merge_rst = ce_bgp_peer_obj.check_peer_bfd_merge_args(
module=module)
end_tmp = dict()
for item in need_peer_bfd_merge_rst:
if item != "need_cfg":
end_tmp[item] = need_peer_bfd_merge_rst[item]
if end_tmp:
end_state["peer bfd"] = end_tmp
else:
exist_tmp = dict()
for item in need_peer_bfd_del_rst:
if item != "need_cfg":
exist_tmp[item] = need_peer_bfd_del_rst[item]
if exist_tmp:
existing["peer bfd"] = exist_tmp
        # already deleted together with the bgp peer above
need_peer_bfd_del_rst = ce_bgp_peer_obj.check_peer_bfd_delete_args(
module=module)
end_tmp = dict()
for item in need_peer_bfd_del_rst:
if item != "need_cfg":
end_tmp[item] = need_peer_bfd_del_rst[item]
if end_tmp:
end_state["peer bfd"] = end_tmp
results = dict()
results['proposed'] = proposed
results['existing'] = existing
results['changed'] = changed
results['end_state'] = end_state
results['updates'] = updates
module.exit_json(**results)
if __name__ == '__main__':
main()
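# Hedged usage sketch (our addition; the module name ce_bgp_neighbor is an
# assumption). A minimal playbook task exercising the BFD options above:
#
# - name: Configure a BGP neighbor with BFD
#   ce_bgp_neighbor:
#     state: present
#     vrf_name: default
#     peer_addr: 192.168.1.2
#     remote_as: "65100"
#     is_bfd_enable: "true"
#     rx_interval: "300"
#     tx_interval: "300"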
|
georgemarshall/django
|
refs/heads/master
|
django/http/cookie.py
|
62
|
from http import cookies
# For backwards compatibility in Django 2.1.
SimpleCookie = cookies.SimpleCookie
# Add support for the SameSite attribute (obsolete once Python 3.7 support is dropped).
cookies.Morsel._reserved.setdefault('samesite', 'SameSite')
def parse_cookie(cookie):
"""
Return a dictionary parsed from a `Cookie:` header string.
"""
cookiedict = {}
for chunk in cookie.split(';'):
if '=' in chunk:
key, val = chunk.split('=', 1)
else:
# Assume an empty name per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
key, val = '', chunk
key, val = key.strip(), val.strip()
if key or val:
# unquote using Python's algorithm.
cookiedict[key] = cookies._unquote(val)
return cookiedict
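# Illustrative behaviour (our addition), doctest-style:
#
#     >>> parse_cookie('sessionid=abc123; flag; theme="dark"')
#     {'sessionid': 'abc123', '': 'flag', 'theme': 'dark'}
#
# Nameless chunks are keyed by the empty string, and quoted values are
# unquoted with http.cookies' own algorithm.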
|
eonpatapon/lollypop
|
refs/heads/master
|
src/define.py
|
1
|
# Copyright (c) 2014-2015 Cedric Bellegarde <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# These are global objects initialised at Lollypop start.
# Member init order is important!
from gi.repository import Gio
try:
from gi.repository import Secret
SecretSchema = {
"org.gnome.Lollypop.lastfm.login": Secret.SchemaAttributeType.STRING
}
SecretAttributes = {
"org.gnome.Lollypop.lastfm.login": "Last.fm login"
}
except Exception:  # any failure loading Secret disables Last.fm secret storage
Secret = None
SecretSchema = None
SecretAttributes = None
GOOGLE_INC = 8
GOOGLE_MAX = 32
Lp = Gio.Application.get_default
# Represents what to do on the next track
class NextContext:
NONE = 0 # Continue playback
STOP_TRACK = 1 # Stop after current track
STOP_ALBUM = 2 # Stop after current album
STOP_ARTIST = 3 # Stop after current artist
START_NEW_ALBUM = 4 # Start a new album
# Represents the playback context
class PlayContext:
genre_id = None
next = NextContext.NONE
class GstPlayFlags:
GST_PLAY_FLAG_VIDEO = 1 << 0 # We want video output
GST_PLAY_FLAG_AUDIO = 1 << 1 # We want audio output
GST_PLAY_FLAG_TEXT = 1 << 3 # We want subtitle output
class ArtSize:
SMALL = 33
MEDIUM = 48 # If changed, adapt width request in AlbumRow.ui
BIG = 200
MONSTER = 500
class Shuffle:
NONE = 0 # No shuffle
TRACKS = 1 # Shuffle by tracks on genre
ALBUMS = 2 # Shuffle by albums on genre
TRACKS_ARTIST = 3 # Shuffle by tracks on artist
ALBUMS_ARTIST = 4 # Shuffle by albums on artist
# Order is important
class Type:
NONE = -1
POPULARS = -2
RANDOMS = -3
RECENTS = -4
PLAYLISTS = -5
RADIOS = -6
EXTERNALS = -7
ALL = -8
MPD = -996
LOVED = -997
NEVER = -998
DEVICES = -1000
SEPARATOR = -2000
COMPILATIONS = -2001
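# Hedged note (our addition): GstPlayFlags values are bit flags meant to be
# OR-ed together into playbin's "flags" property, e.g.
#
#     flags = GstPlayFlags.GST_PLAY_FLAG_AUDIO | GstPlayFlags.GST_PLAY_FLAG_TEXT
#
# The negative Type ids above act as sentinels, chosen so they can never
# collide with the positive rowids the database hands out.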
|
mrimp/N910TUVU1ANIH_kernel
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
|
4653
|
# EventClass.py
#
# This is a library defining some event type classes, which can be
# used by other scripts to analyze perf samples.
#
# Currently there are just a few classes defined as examples:
# PerfEvent is the base class for all perf event samples, PebsEvent
# is a HW-based Intel x86 PEBS event, and users can add more SW/HW
# event classes based on their requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have a good way to tell the event type other than by
# the size of the raw buffer: a raw PEBS event with load latency data is
# 176 bytes, while a pure PEBS event is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
if (len(raw_buf) == 144):
event = PebsEvent(name, comm, dso, symbol, raw_buf)
elif (len(raw_buf) == 176):
event = PebsNHM(name, comm, dso, symbol, raw_buf)
else:
event = PerfEvent(name, comm, dso, symbol, raw_buf)
return event
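# Hedged demo (our addition; the helper below is ours, not part of perf).
# A fabricated 144-byte raw buffer dispatches to PebsEvent:
def _demo_size_dispatch():
    raw = struct.pack('18Q', *range(18))  # 18 * 8 bytes == 144 -> PEBS
    ev = create_event("cycles", "bash", "/bin/bash", "main", raw)
    assert isinstance(ev, PebsEvent)
    return ev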
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
pebs_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
        tmp_buf = raw_buf[0:80]
flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
self.flags = flags
self.ip = ip
self.ax = ax
self.bx = bx
self.cx = cx
self.dx = dx
self.si = si
self.di = di
self.bp = bp
self.sp = sp
PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsEvent.pebs_num += 1
del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info, which lies
# in the four 64-bit words written after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
pebs_nhm_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
        tmp_buf = raw_buf[144:176]
status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
self.status = status
self.dla = dla
self.dse = dse
self.lat = lat
PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsNHM.pebs_nhm_num += 1
del tmp_buf
|
jaggu303619/asylum
|
refs/heads/master
|
openerp/report/pyPdf/filters.py
|
13
|
# vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2006, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Implementation of stream filters for PDF.
"""
__author__ = "Mathieu Fenniak"
__author_email__ = "[email protected]"
from utils import PdfReadError
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
import zlib
def decompress(data):
return zlib.decompress(data)
def compress(data):
return zlib.compress(data)
except ImportError:
# Unable to import zlib. Attempt to use the System.IO.Compression
# library from the .NET framework. (IronPython only)
import System
from System import IO, Collections, Array
def _string_to_bytearr(buf):
retval = Array.CreateInstance(System.Byte, len(buf))
for i in range(len(buf)):
retval[i] = ord(buf[i])
return retval
def _bytearr_to_string(bytes):
retval = ""
for i in range(bytes.Length):
retval += chr(bytes[i])
return retval
def _read_bytes(stream):
ms = IO.MemoryStream()
buf = Array.CreateInstance(System.Byte, 2048)
while True:
bytes = stream.Read(buf, 0, buf.Length)
if bytes == 0:
break
else:
ms.Write(buf, 0, bytes)
retval = ms.ToArray()
ms.Close()
return retval
def decompress(data):
bytes = _string_to_bytearr(data)
ms = IO.MemoryStream()
ms.Write(bytes, 0, bytes.Length)
ms.Position = 0 # fseek 0
gz = IO.Compression.DeflateStream(ms, IO.Compression.CompressionMode.Decompress)
bytes = _read_bytes(gz)
retval = _bytearr_to_string(bytes)
gz.Close()
return retval
def compress(data):
bytes = _string_to_bytearr(data)
ms = IO.MemoryStream()
gz = IO.Compression.DeflateStream(ms, IO.Compression.CompressionMode.Compress, True)
gz.Write(bytes, 0, bytes.Length)
gz.Close()
ms.Position = 0 # fseek 0
bytes = ms.ToArray()
retval = _bytearr_to_string(bytes)
ms.Close()
return retval
class FlateDecode(object):
def decode(data, decodeParms):
data = decompress(data)
predictor = 1
if decodeParms:
predictor = decodeParms.get("/Predictor", 1)
# predictor 1 == no predictor
if predictor != 1:
columns = decodeParms["/Columns"]
# PNG prediction:
if 10 <= predictor <= 15:
output = StringIO()
# PNG prediction can vary from row to row
rowlength = columns + 1
assert len(data) % rowlength == 0
prev_rowdata = (0,) * rowlength
for row in xrange(len(data) / rowlength):
rowdata = [ord(x) for x in data[(row*rowlength):((row+1)*rowlength)]]
filterByte = rowdata[0]
if filterByte == 0:
pass
elif filterByte == 1:
for i in range(2, rowlength):
rowdata[i] = (rowdata[i] + rowdata[i-1]) % 256
elif filterByte == 2:
for i in range(1, rowlength):
rowdata[i] = (rowdata[i] + prev_rowdata[i]) % 256
else:
# unsupported PNG filter
raise PdfReadError("Unsupported PNG filter %r" % filterByte)
prev_rowdata = rowdata
output.write(''.join([chr(x) for x in rowdata[1:]]))
data = output.getvalue()
else:
# unsupported predictor
raise PdfReadError("Unsupported flatedecode predictor %r" % predictor)
return data
decode = staticmethod(decode)
def encode(data):
return compress(data)
encode = staticmethod(encode)
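# Hedged demo (our addition): with no /Predictor entry in decodeParms,
# decode() is a plain inflate, so encode()/decode() round-trip losslessly.
def _flate_roundtrip_example():
    data = "hello pdf stream " * 4
    assert FlateDecode.decode(FlateDecode.encode(data), None) == data
    return data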
class ASCIIHexDecode(object):
def decode(data, decodeParms=None):
retval = ""
char = ""
x = 0
while True:
c = data[x]
if c == ">":
break
elif c.isspace():
x += 1
continue
char += c
if len(char) == 2:
retval += chr(int(char, base=16))
char = ""
x += 1
assert char == ""
return retval
decode = staticmethod(decode)
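# For example (our note): ASCIIHexDecode.decode("48656C6C6F>") == "Hello".
# Whitespace between hex digit pairs is skipped and ">" ends the stream.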
class ASCII85Decode(object):
def decode(data, decodeParms=None):
retval = ""
group = []
x = 0
hitEod = False
# remove all whitespace from data
data = [y for y in data if not (y in ' \n\r\t')]
while not hitEod:
c = data[x]
if len(retval) == 0 and c == "<" and data[x+1] == "~":
x += 2
continue
#elif c.isspace():
# x += 1
# continue
            elif c == 'z':
                assert len(group) == 0
                retval += '\x00\x00\x00\x00'
                x += 1  # advance past 'z'; a bare continue would loop forever here
                continue
elif c == "~" and data[x+1] == ">":
if len(group) != 0:
# cannot have a final group of just 1 char
assert len(group) > 1
cnt = len(group) - 1
group += [ 85, 85, 85 ]
hitEod = cnt
else:
break
else:
c = ord(c) - 33
assert 0 <= c < 85
group += [ c ]
if len(group) >= 5:
b = group[0] * (85**4) + \
group[1] * (85**3) + \
group[2] * (85**2) + \
group[3] * 85 + \
group[4]
assert b < (2**32 - 1)
c4 = chr((b >> 0) % 256)
c3 = chr((b >> 8) % 256)
c2 = chr((b >> 16) % 256)
c1 = chr(b >> 24)
retval += (c1 + c2 + c3 + c4)
if hitEod:
retval = retval[:-4+hitEod]
group = []
x += 1
return retval
decode = staticmethod(decode)
def decodeStreamData(stream):
from generic import NameObject
filters = stream.get("/Filter", ())
if len(filters) and not isinstance(filters[0], NameObject):
# we have a single filter instance
filters = (filters,)
data = stream._data
for filterType in filters:
if filterType == "/FlateDecode":
data = FlateDecode.decode(data, stream.get("/DecodeParms"))
elif filterType == "/ASCIIHexDecode":
data = ASCIIHexDecode.decode(data)
elif filterType == "/ASCII85Decode":
data = ASCII85Decode.decode(data)
elif filterType == "/Crypt":
decodeParams = stream.get("/DecodeParams", {})
if "/Name" not in decodeParams and "/Type" not in decodeParams:
pass
else:
raise NotImplementedError("/Crypt filter with /Name or /Type not supported yet")
else:
# unsupported filter
raise NotImplementedError("unsupported filter %s" % filterType)
return data
if __name__ == "__main__":
assert "abc" == ASCIIHexDecode.decode('61\n626\n3>')
ascii85Test = """
<~9jqo^BlbD-BleB1DJ+*+F(f,q/0JhKF<GL>[email protected]$d7F!,L7@<6@)/0JDEF<G%<+EV:2F!,
O<DJ+*.@<*K0@<6L(Df-\\0Ec5e;DffZ(EZee.Bl.9pF"AGXBPCsi+DGm>@3BB/F*&OCAfu2/AKY
i(DIb:@FD,*)+C]U=@3BN#EcYf8ATD3s@q?d$AftVqCh[NqF<G:8+EV:.+Cf>-FD5W8ARlolDIa
l(DId<j@<?3r@:F%a+D58'ATD4$Bl@l3De:,-DJs`8ARoFb/0JMK@qB4^F!,R<AKZ&-DfTqBG%G
>uD.RTpAKYo'+CT/5+Cei#DII?(E,9)oF*2M7/c~>
"""
ascii85_originalText="Man is distinguished, not only by his reason, but by this singular passion from other animals, which is a lust of the mind, that by a perseverance of delight in the continued and indefatigable generation of knowledge, exceeds the short vehemence of any carnal pleasure."
assert ASCII85Decode.decode(ascii85Test) == ascii85_originalText
|
Djabbz/wakatime
|
refs/heads/master
|
wakatime/packages/requests/packages/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the Hebrew model probers.
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
        # causing the Non-Final tsadi to appear at the end of a word even
# though this is not the case in the original text.
        # The letters Pe and Kaf rarely display a related behavior of not being
        # a good Non-Final letter: words like 'Pop', 'Winamp' and 'Mubarak',
        # for example, legitimately end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in isNonFinal(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
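# Hedged sketch (our addition, Python 2 string semantics; _FakeModel is a
# hypothetical stand-in for the SBCharSetProbers that SBCSGroupProber
# normally wires in):
#
#     class _FakeModel(object):
#         def __init__(self, conf): self._conf = conf
#         def get_confidence(self): return self._conf
#         def get_state(self): return eDetecting
#
#     prober = HebrewProber()
#     prober.set_model_probers(_FakeModel(0.9), _FakeModel(0.2))
#     prober.feed('\xf9\xec\xe5\xed ')  # one word ending in FINAL_MEM
#     prober.get_charset_name()         # -> "windows-1255"; the final-letter
#                                       # margin (1) is below 5, so the model
#                                       # score gap 0.7 decides: logical wins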
|
nvazquez/Turtlebots
|
refs/heads/master
|
plugins/xevents/Xlib/keysymdef/miscellany.py
|
14
|
XK_BackSpace = 0xFF08
XK_Tab = 0xFF09
XK_Linefeed = 0xFF0A
XK_Clear = 0xFF0B
XK_Return = 0xFF0D
XK_Pause = 0xFF13
XK_Scroll_Lock = 0xFF14
XK_Sys_Req = 0xFF15
XK_Escape = 0xFF1B
XK_Delete = 0xFFFF
XK_Multi_key = 0xFF20
XK_SingleCandidate = 0xFF3C
XK_MultipleCandidate = 0xFF3D
XK_PreviousCandidate = 0xFF3E
XK_Kanji = 0xFF21
XK_Muhenkan = 0xFF22
XK_Henkan_Mode = 0xFF23
XK_Henkan = 0xFF23
XK_Romaji = 0xFF24
XK_Hiragana = 0xFF25
XK_Katakana = 0xFF26
XK_Hiragana_Katakana = 0xFF27
XK_Zenkaku = 0xFF28
XK_Hankaku = 0xFF29
XK_Zenkaku_Hankaku = 0xFF2A
XK_Touroku = 0xFF2B
XK_Massyo = 0xFF2C
XK_Kana_Lock = 0xFF2D
XK_Kana_Shift = 0xFF2E
XK_Eisu_Shift = 0xFF2F
XK_Eisu_toggle = 0xFF30
XK_Zen_Koho = 0xFF3D
XK_Mae_Koho = 0xFF3E
XK_Home = 0xFF50
XK_Left = 0xFF51
XK_Up = 0xFF52
XK_Right = 0xFF53
XK_Down = 0xFF54
XK_Prior = 0xFF55
XK_Page_Up = 0xFF55
XK_Next = 0xFF56
XK_Page_Down = 0xFF56
XK_End = 0xFF57
XK_Begin = 0xFF58
XK_Select = 0xFF60
XK_Print = 0xFF61
XK_Execute = 0xFF62
XK_Insert = 0xFF63
XK_Undo = 0xFF65
XK_Redo = 0xFF66
XK_Menu = 0xFF67
XK_Find = 0xFF68
XK_Cancel = 0xFF69
XK_Help = 0xFF6A
XK_Break = 0xFF6B
XK_Mode_switch = 0xFF7E
XK_script_switch = 0xFF7E
XK_Num_Lock = 0xFF7F
XK_KP_Space = 0xFF80
XK_KP_Tab = 0xFF89
XK_KP_Enter = 0xFF8D
XK_KP_F1 = 0xFF91
XK_KP_F2 = 0xFF92
XK_KP_F3 = 0xFF93
XK_KP_F4 = 0xFF94
XK_KP_Home = 0xFF95
XK_KP_Left = 0xFF96
XK_KP_Up = 0xFF97
XK_KP_Right = 0xFF98
XK_KP_Down = 0xFF99
XK_KP_Prior = 0xFF9A
XK_KP_Page_Up = 0xFF9A
XK_KP_Next = 0xFF9B
XK_KP_Page_Down = 0xFF9B
XK_KP_End = 0xFF9C
XK_KP_Begin = 0xFF9D
XK_KP_Insert = 0xFF9E
XK_KP_Delete = 0xFF9F
XK_KP_Equal = 0xFFBD
XK_KP_Multiply = 0xFFAA
XK_KP_Add = 0xFFAB
XK_KP_Separator = 0xFFAC
XK_KP_Subtract = 0xFFAD
XK_KP_Decimal = 0xFFAE
XK_KP_Divide = 0xFFAF
XK_KP_0 = 0xFFB0
XK_KP_1 = 0xFFB1
XK_KP_2 = 0xFFB2
XK_KP_3 = 0xFFB3
XK_KP_4 = 0xFFB4
XK_KP_5 = 0xFFB5
XK_KP_6 = 0xFFB6
XK_KP_7 = 0xFFB7
XK_KP_8 = 0xFFB8
XK_KP_9 = 0xFFB9
XK_F1 = 0xFFBE
XK_F2 = 0xFFBF
XK_F3 = 0xFFC0
XK_F4 = 0xFFC1
XK_F5 = 0xFFC2
XK_F6 = 0xFFC3
XK_F7 = 0xFFC4
XK_F8 = 0xFFC5
XK_F9 = 0xFFC6
XK_F10 = 0xFFC7
XK_F11 = 0xFFC8
XK_L1 = 0xFFC8
XK_F12 = 0xFFC9
XK_L2 = 0xFFC9
XK_F13 = 0xFFCA
XK_L3 = 0xFFCA
XK_F14 = 0xFFCB
XK_L4 = 0xFFCB
XK_F15 = 0xFFCC
XK_L5 = 0xFFCC
XK_F16 = 0xFFCD
XK_L6 = 0xFFCD
XK_F17 = 0xFFCE
XK_L7 = 0xFFCE
XK_F18 = 0xFFCF
XK_L8 = 0xFFCF
XK_F19 = 0xFFD0
XK_L9 = 0xFFD0
XK_F20 = 0xFFD1
XK_L10 = 0xFFD1
XK_F21 = 0xFFD2
XK_R1 = 0xFFD2
XK_F22 = 0xFFD3
XK_R2 = 0xFFD3
XK_F23 = 0xFFD4
XK_R3 = 0xFFD4
XK_F24 = 0xFFD5
XK_R4 = 0xFFD5
XK_F25 = 0xFFD6
XK_R5 = 0xFFD6
XK_F26 = 0xFFD7
XK_R6 = 0xFFD7
XK_F27 = 0xFFD8
XK_R7 = 0xFFD8
XK_F28 = 0xFFD9
XK_R8 = 0xFFD9
XK_F29 = 0xFFDA
XK_R9 = 0xFFDA
XK_F30 = 0xFFDB
XK_R10 = 0xFFDB
XK_F31 = 0xFFDC
XK_R11 = 0xFFDC
XK_F32 = 0xFFDD
XK_R12 = 0xFFDD
XK_F33 = 0xFFDE
XK_R13 = 0xFFDE
XK_F34 = 0xFFDF
XK_R14 = 0xFFDF
XK_F35 = 0xFFE0
XK_R15 = 0xFFE0
XK_Shift_L = 0xFFE1
XK_Shift_R = 0xFFE2
XK_Control_L = 0xFFE3
XK_Control_R = 0xFFE4
XK_Caps_Lock = 0xFFE5
XK_Shift_Lock = 0xFFE6
XK_Meta_L = 0xFFE7
XK_Meta_R = 0xFFE8
XK_Alt_L = 0xFFE9
XK_Alt_R = 0xFFEA
XK_Super_L = 0xFFEB
XK_Super_R = 0xFFEC
XK_Hyper_L = 0xFFED
XK_Hyper_R = 0xFFEE
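# Illustrative helper (our addition, not part of Xlib): reverse-map keysym
# values to one of their XK_* names. Aliased keysyms (e.g. XK_F11 / XK_L1)
# share a value, so only one name survives per value.
_keysym_names = dict((value, name) for name, value in globals().items()
                     if name.startswith('XK_'))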
|
Bforartists/scons
|
refs/heads/master
|
scons-local/SCons/Warnings.py
|
3
|
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""SCons.Warnings
This file implements the warnings framework for SCons.
"""
__revision__ = "src/engine/SCons/Warnings.py 2014/07/05 09:42:21 garyo"
import sys
import SCons.Errors
class Warning(SCons.Errors.UserError):
pass
class WarningOnByDefault(Warning):
pass
# NOTE: If you add a new warning class, add it to the man page, too!
class TargetNotBuiltWarning(Warning): # Should go to OnByDefault
pass
class CacheWriteErrorWarning(Warning):
pass
class CorruptSConsignWarning(WarningOnByDefault):
pass
class DependencyWarning(Warning):
pass
class DuplicateEnvironmentWarning(WarningOnByDefault):
pass
class FutureReservedVariableWarning(WarningOnByDefault):
pass
class LinkWarning(WarningOnByDefault):
pass
class MisleadingKeywordsWarning(WarningOnByDefault):
pass
class MissingSConscriptWarning(WarningOnByDefault):
pass
class NoMD5ModuleWarning(WarningOnByDefault):
pass
class NoMetaclassSupportWarning(WarningOnByDefault):
pass
class NoObjectCountWarning(WarningOnByDefault):
pass
class NoParallelSupportWarning(WarningOnByDefault):
pass
class ReservedVariableWarning(WarningOnByDefault):
pass
class StackSizeWarning(WarningOnByDefault):
pass
class VisualCMissingWarning(WarningOnByDefault):
pass
# Used when MSVC_VERSION and MSVS_VERSION do not point to the
# same version (MSVS_VERSION is deprecated)
class VisualVersionMismatch(WarningOnByDefault):
pass
class VisualStudioMissingWarning(Warning):
pass
class FortranCxxMixWarning(LinkWarning):
pass
# Deprecation warnings
class FutureDeprecatedWarning(Warning):
pass
class DeprecatedWarning(Warning):
pass
class MandatoryDeprecatedWarning(DeprecatedWarning):
pass
# Special case; base always stays DeprecatedWarning
class PythonVersionWarning(DeprecatedWarning):
pass
class DeprecatedSourceCodeWarning(FutureDeprecatedWarning):
pass
class DeprecatedBuildDirWarning(DeprecatedWarning):
pass
class TaskmasterNeedsExecuteWarning(DeprecatedWarning):
pass
class DeprecatedCopyWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedSourceSignaturesWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedTargetSignaturesWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedDebugOptionsWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedSigModuleWarning(MandatoryDeprecatedWarning):
pass
class DeprecatedBuilderKeywordsWarning(MandatoryDeprecatedWarning):
pass
# The below is a list of 2-tuples. The first element is a class object.
# The second element is true if that class is enabled, false if it is disabled.
_enabled = []
# If set, raise the warning as an exception
_warningAsException = 0
# If not None, a function to call with the warning
_warningOut = None
def suppressWarningClass(clazz):
"""Suppresses all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 0))
def enableWarningClass(clazz):
"""Enables all warnings that are of type clazz or
derived from clazz."""
_enabled.insert(0, (clazz, 1))
def warningAsException(flag=1):
"""Turn warnings into exceptions. Returns the old value of the flag."""
global _warningAsException
old = _warningAsException
_warningAsException = flag
return old
def warn(clazz, *args):
global _enabled, _warningAsException, _warningOut
warning = clazz(args)
for clazz, flag in _enabled:
if isinstance(warning, clazz):
if flag:
if _warningAsException:
raise warning
if _warningOut:
_warningOut(warning)
break
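# Hedged usage sketch (our addition; the lambda sink is ours -- normally the
# SCons front end installs _warningOut):
#
#     enableWarningClass(DependencyWarning)
#     SCons.Warnings._warningOut = lambda w: sys.stderr.write("%s\n" % w)
#     warn(DependencyWarning, "foo.c depends on a missing header")
#
# A warning only reaches the sink (or raises, if warningAsException() was
# called) after its class has been enabled.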
def process_warn_strings(arguments):
"""Process string specifications of enabling/disabling warnings,
as passed to the --warn option or the SetOption('warn') function.
An argument to this option should be of the form <warning-class>
or no-<warning-class>. The warning class is munged in order
to get an actual class name from the classes above, which we
need to pass to the {enable,disable}WarningClass() functions.
The supplied <warning-class> is split on hyphens, each element
is capitalized, then smushed back together. Then the string
"Warning" is appended to get the class name.
For example, 'deprecated' will enable the DeprecatedWarning
class. 'no-dependency' will disable the DependencyWarning class.
As a special case, --warn=all and --warn=no-all will enable or
disable (respectively) the base Warning class of all warnings.
"""
def _capitalize(s):
if s[:5] == "scons":
return "SCons" + s[5:]
else:
return s.capitalize()
for arg in arguments:
elems = arg.lower().split('-')
enable = 1
if elems[0] == 'no':
enable = 0
del elems[0]
if len(elems) == 1 and elems[0] == 'all':
class_name = "Warning"
else:
class_name = ''.join(map(_capitalize, elems)) + "Warning"
try:
clazz = globals()[class_name]
except KeyError:
sys.stderr.write("No warning type: '%s'\n" % arg)
else:
if enable:
enableWarningClass(clazz)
elif issubclass(clazz, MandatoryDeprecatedWarning):
fmt = "Can not disable mandataory warning: '%s'\n"
sys.stderr.write(fmt % arg)
else:
suppressWarningClass(clazz)
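# For example (our note): process_warn_strings(['no-dependency',
# 'deprecated']) suppresses DependencyWarning and enables DeprecatedWarning,
# while process_warn_strings(['no-all']) suppresses the base Warning class
# and with it every warning.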
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
tomasreimers/tensorflow-emscripten
|
refs/heads/master
|
tensorflow/contrib/linalg/python/kernel_tests/linear_operator_composition_test.py
|
5
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import linalg as linalg_lib
from tensorflow.contrib.linalg.python.ops import linear_operator_test_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
linalg = linalg_lib
random_seed.set_random_seed(23)
rng = np.random.RandomState(0)
class SquareLinearOperatorCompositionTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def setUp(self):
# Increase from 1e-6 to 1e-4
self._atol[dtypes.float32] = 1e-4
self._atol[dtypes.complex64] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._rtol[dtypes.complex64] = 1e-4
def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
sess = ops.get_default_session()
shape = list(shape)
    # Either 1 or 2 matrices, depending on the random draw below.
num_operators = rng.randint(low=1, high=3)
matrices = [
linear_operator_test_util.random_positive_definite_matrix(
shape, dtype, force_well_conditioned=True)
for _ in range(num_operators)
]
if use_placeholder:
matrices_ph = [
array_ops.placeholder(dtype=dtype) for _ in range(num_operators)
]
# Evaluate here because (i) you cannot feed a tensor, and (ii)
# values are random and we want the same value used for both mat and
# feed_dict.
matrices = sess.run(matrices)
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorMatrix(m_ph) for m_ph in matrices_ph])
feed_dict = {m_ph: m for (m_ph, m) in zip(matrices_ph, matrices)}
else:
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorMatrix(m) for m in matrices])
feed_dict = None
# Convert back to Tensor. Needed if use_placeholder, since then we have
# already evaluated each matrix to a numpy array.
apply_order_list = list(reversed(matrices))
mat = ops.convert_to_tensor(apply_order_list[0])
for other_mat in apply_order_list[1:]:
mat = math_ops.matmul(other_mat, mat)
return operator, mat, feed_dict
def test_is_x_flags(self):
    # Matrix with two positive eigenvalues, 1 and 1.
    # The matrix values do not affect auto-setting of the flags.
matrix = [[1., 0.], [1., 1.]]
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorMatrix(matrix)],
is_positive_definite=True,
is_non_singular=True,
is_self_adjoint=False)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertFalse(operator.is_self_adjoint)
def test_is_non_singular_auto_set(self):
# Matrix with two positive eigenvalues, 11 and 8.
    # The matrix values do not affect auto-setting of the flags.
matrix = [[11., 0.], [1., 8.]]
operator_1 = linalg.LinearOperatorMatrix(matrix, is_non_singular=True)
operator_2 = linalg.LinearOperatorMatrix(matrix, is_non_singular=True)
operator = linalg.LinearOperatorComposition(
[operator_1, operator_2],
is_positive_definite=False, # No reason it HAS to be False...
is_non_singular=None)
self.assertFalse(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
with self.assertRaisesRegexp(ValueError, "always non-singular"):
linalg.LinearOperatorComposition(
[operator_1, operator_2], is_non_singular=False)
def test_name(self):
matrix = [[11., 0.], [1., 8.]]
operator_1 = linalg.LinearOperatorMatrix(matrix, name="left")
operator_2 = linalg.LinearOperatorMatrix(matrix, name="right")
operator = linalg.LinearOperatorComposition([operator_1, operator_2])
self.assertEqual("left_o_right", operator.name)
def test_different_dtypes_raises(self):
operators = [
linalg.LinearOperatorMatrix(rng.rand(2, 3, 3)),
linalg.LinearOperatorMatrix(rng.rand(2, 3, 3).astype(np.float32))
]
with self.assertRaisesRegexp(TypeError, "same dtype"):
linalg.LinearOperatorComposition(operators)
def test_empty_operators_raises(self):
with self.assertRaisesRegexp(ValueError, "non-empty"):
linalg.LinearOperatorComposition([])
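# Hedged note (our addition): for operators [A, B], LinearOperatorComposition
# applies x -> A @ (B @ x), so its dense equivalent is matmul(A, B). That is
# why _operator_and_mat_and_feed_dict multiplies the matrices in
# list(reversed(matrices)) order when building the reference `mat`.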
class NonSquareLinearOperatorCompositionTest(
linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def setUp(self):
# Increase from 1e-6 to 1e-4
self._atol[dtypes.float32] = 1e-4
self._atol[dtypes.complex64] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._rtol[dtypes.complex64] = 1e-4
def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
sess = ops.get_default_session()
shape = list(shape)
# Test only the case of 2 matrices.
# The Square test uses either 1 or 2, so we have tested the case of 1 matrix
# sufficiently.
num_operators = 2
# Create 2 matrices/operators, A1, A2, which becomes A = A1 A2.
# Use inner dimension of 2.
k = 2
batch_shape = shape[:-2]
shape_1 = batch_shape + [shape[-2], k]
shape_2 = batch_shape + [k, shape[-1]]
matrices = [
linear_operator_test_util.random_normal(
shape_1, dtype=dtype), linear_operator_test_util.random_normal(
shape_2, dtype=dtype)
]
if use_placeholder:
matrices_ph = [
array_ops.placeholder(dtype=dtype) for _ in range(num_operators)
]
# Evaluate here because (i) you cannot feed a tensor, and (ii)
# values are random and we want the same value used for both mat and
# feed_dict.
matrices = sess.run(matrices)
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorMatrix(m_ph) for m_ph in matrices_ph])
feed_dict = {m_ph: m for (m_ph, m) in zip(matrices_ph, matrices)}
else:
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorMatrix(m) for m in matrices])
feed_dict = None
# Convert back to Tensor. Needed if use_placeholder, since then we have
# already evaluated each matrix to a numpy array.
apply_order_list = list(reversed(matrices))
mat = ops.convert_to_tensor(apply_order_list[0])
for other_mat in apply_order_list[1:]:
mat = math_ops.matmul(other_mat, mat)
return operator, mat, feed_dict
def test_static_shapes(self):
operators = [
linalg.LinearOperatorMatrix(rng.rand(2, 3, 4)),
linalg.LinearOperatorMatrix(rng.rand(2, 4, 5))
]
operator = linalg.LinearOperatorComposition(operators)
self.assertAllEqual((2, 3, 5), operator.shape)
def test_dynamic_shapes_when_statically_available(self):
operators = [
linalg.LinearOperatorMatrix(rng.rand(2, 3, 4)),
linalg.LinearOperatorMatrix(rng.rand(2, 4, 5))
]
operator = linalg.LinearOperatorComposition(operators)
with self.test_session():
self.assertAllEqual((2, 3, 5), operator.shape_dynamic().eval())
def test_dynamic_shapes_when_only_dynamically_available(self):
mat_1 = rng.rand(1, 2, 3, 4)
mat_2 = rng.rand(1, 2, 4, 5)
mat_ph_1 = array_ops.placeholder(dtypes.float64)
mat_ph_2 = array_ops.placeholder(dtypes.float64)
feed_dict = {mat_ph_1: mat_1, mat_ph_2: mat_2}
operators = [
linalg.LinearOperatorMatrix(mat_ph_1),
linalg.LinearOperatorMatrix(mat_ph_2)
]
operator = linalg.LinearOperatorComposition(operators)
with self.test_session():
self.assertAllEqual(
(1, 2, 3, 5), operator.shape_dynamic().eval(feed_dict=feed_dict))
if __name__ == "__main__":
test.main()
|
michael-berlin/vitess
|
refs/heads/master
|
py/vtproto/vtctldata_pb2.py
|
5
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vtctldata.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import logutil_pb2 as logutil__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='vtctldata.proto',
package='vtctldata',
syntax='proto3',
serialized_pb=_b('\n\x0fvtctldata.proto\x12\tvtctldata\x1a\rlogutil.proto\"X\n\x1a\x45xecuteVtctlCommandRequest\x12\x0c\n\x04\x61rgs\x18\x01 \x03(\t\x12\x16\n\x0e\x61\x63tion_timeout\x18\x02 \x01(\x03\x12\x14\n\x0clock_timeout\x18\x03 \x01(\x03\"<\n\x1b\x45xecuteVtctlCommandResponse\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.logutil.Eventb\x06proto3')
,
dependencies=[logutil__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EXECUTEVTCTLCOMMANDREQUEST = _descriptor.Descriptor(
name='ExecuteVtctlCommandRequest',
full_name='vtctldata.ExecuteVtctlCommandRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='args', full_name='vtctldata.ExecuteVtctlCommandRequest.args', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='action_timeout', full_name='vtctldata.ExecuteVtctlCommandRequest.action_timeout', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lock_timeout', full_name='vtctldata.ExecuteVtctlCommandRequest.lock_timeout', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=45,
serialized_end=133,
)
_EXECUTEVTCTLCOMMANDRESPONSE = _descriptor.Descriptor(
name='ExecuteVtctlCommandResponse',
full_name='vtctldata.ExecuteVtctlCommandResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='vtctldata.ExecuteVtctlCommandResponse.event', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=135,
serialized_end=195,
)
_EXECUTEVTCTLCOMMANDRESPONSE.fields_by_name['event'].message_type = logutil__pb2._EVENT
DESCRIPTOR.message_types_by_name['ExecuteVtctlCommandRequest'] = _EXECUTEVTCTLCOMMANDREQUEST
DESCRIPTOR.message_types_by_name['ExecuteVtctlCommandResponse'] = _EXECUTEVTCTLCOMMANDRESPONSE
ExecuteVtctlCommandRequest = _reflection.GeneratedProtocolMessageType('ExecuteVtctlCommandRequest', (_message.Message,), dict(
DESCRIPTOR = _EXECUTEVTCTLCOMMANDREQUEST,
__module__ = 'vtctldata_pb2'
# @@protoc_insertion_point(class_scope:vtctldata.ExecuteVtctlCommandRequest)
))
_sym_db.RegisterMessage(ExecuteVtctlCommandRequest)
ExecuteVtctlCommandResponse = _reflection.GeneratedProtocolMessageType('ExecuteVtctlCommandResponse', (_message.Message,), dict(
DESCRIPTOR = _EXECUTEVTCTLCOMMANDRESPONSE,
__module__ = 'vtctldata_pb2'
# @@protoc_insertion_point(class_scope:vtctldata.ExecuteVtctlCommandResponse)
))
_sym_db.RegisterMessage(ExecuteVtctlCommandResponse)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)
|
CatherineWong/zxing-glass
|
refs/heads/master
|
cpp/scons/scons-local-2.0.0.final.0/SCons/Taskmaster.py
|
34
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__doc__ = """
Generic Taskmaster module for the SCons build engine.
This module contains the primary interface(s) between a wrapping user
interface and the SCons build engine. There are two key classes here:
Taskmaster
This is the main engine for walking the dependency graph and
calling things to decide what does or doesn't need to be built.
Task
This is the base class for allowing a wrapping interface to
decide what does or doesn't actually need to be done. The
intention is for a wrapping interface to subclass this as
appropriate for different types of behavior it may need.
The canonical example is the SCons native Python interface,
which has Task subclasses that handle its specific behavior,
like printing "`foo' is up to date" when a top-level target
doesn't need to be built, and handling the -c option by removing
targets as its "build" action. There is also a separate subclass
for suppressing this output when the -q option is used.
The Taskmaster instantiates a Task object for each (set of)
target(s) that it decides need to be evaluated and/or built.
"""
__revision__ = "src/engine/SCons/Taskmaster.py 5023 2010/06/14 22:05:46 scons"
from itertools import chain
import operator
import sys
import traceback
import SCons.Errors
import SCons.Node
import SCons.Warnings
StateString = SCons.Node.StateString
NODE_NO_STATE = SCons.Node.no_state
NODE_PENDING = SCons.Node.pending
NODE_EXECUTING = SCons.Node.executing
NODE_UP_TO_DATE = SCons.Node.up_to_date
NODE_EXECUTED = SCons.Node.executed
NODE_FAILED = SCons.Node.failed
# A subsystem for recording stats about how different Nodes are handled by
# the main Taskmaster loop. There's no external control here (no need for
# a --debug= option); enable it by changing the value of CollectStats.
CollectStats = None
class Stats(object):
"""
A simple class for holding statistics about the disposition of a
Node by the Taskmaster. If we're collecting statistics, each Node
processed by the Taskmaster gets one of these attached, in which case
the Taskmaster records its decision each time it processes the Node.
(Ideally, that's just once per Node.)
"""
def __init__(self):
"""
Instantiates a Taskmaster.Stats object, initializing all
appropriate counters to zero.
"""
self.considered = 0
self.already_handled = 0
self.problem = 0
self.child_failed = 0
self.not_built = 0
self.side_effects = 0
self.build = 0
StatsNodes = []
fmt = "%(considered)3d "\
"%(already_handled)3d " \
"%(problem)3d " \
"%(child_failed)3d " \
"%(not_built)3d " \
"%(side_effects)3d " \
"%(build)3d "
def dump_stats():
for n in sorted(StatsNodes, key=lambda a: str(a)):
print (fmt % n.stats.__dict__) + str(n)
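# A usage sketch (illustrative, not part of SCons): as noted above there is
# no --debug= option for this subsystem; a wrapper enables it by assigning a
# true value to CollectStats before the build runs and calling dump_stats()
# afterwards:
#
#     import SCons.Taskmaster
#     SCons.Taskmaster.CollectStats = 1
#     # ... run the build ...
#     SCons.Taskmaster.dump_stats()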
class Task(object):
"""
Default SCons build engine task.
This controls the interaction of the actual building of node
and the rest of the engine.
This is expected to handle all of the normally-customizable
aspects of controlling a build, so any given application
*should* be able to do what it wants by sub-classing this
class and overriding methods as appropriate. If an application
    needs to customize something by sub-classing Taskmaster (or
some other build engine class), we should first try to migrate
that functionality into this class.
Note that it's generally a good idea for sub-classes to call
these methods explicitly to update state, etc., rather than
roll their own interaction with Taskmaster from scratch.
"""
def __init__(self, tm, targets, top, node):
self.tm = tm
self.targets = targets
self.top = top
self.node = node
self.exc_clear()
def trace_message(self, method, node, description='node'):
fmt = '%-20s %s %s\n'
return fmt % (method + ':', description, self.tm.trace_node(node))
def display(self, message):
"""
Hook to allow the calling interface to display a message.
This hook gets called as part of preparing a task for execution
(that is, a Node to be built). As part of figuring out what Node
        should be built next, the actual target list may be altered,
along with a message describing the alteration. The calling
interface can subclass Task and provide a concrete implementation
of this method to see those messages.
"""
pass
def prepare(self):
"""
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
        # Now that it's the appropriate time, give the Taskmaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
executor.prepare()
for t in executor.get_action_targets():
t.prepare()
for s in t.side_effects:
s.prepare()
def get_target(self):
"""Fetch the target being built or updated by this task.
"""
return self.node
def needs_execute(self):
# TODO(deprecate): "return True" is the old default behavior;
# change it to NotImplementedError (after running through the
# Deprecation Cycle) so the desired behavior is explicitly
# determined by which concrete subclass is used.
#raise NotImplementedError
msg = ('Taskmaster.Task is an abstract base class; instead of\n'
'\tusing it directly, '
'derive from it and override the abstract methods.')
SCons.Warnings.warn(SCons.Warnings.TaskmasterNeedsExecuteWarning, msg)
return True
def execute(self):
"""
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
everything_was_cached = 1
for t in self.targets:
if t.retrieve_from_cache():
# Call the .built() method without calling the
# .push_to_cache() method, since we just got the
# target from the cache and don't need to push
# it back there.
t.set_state(NODE_EXECUTED)
t.built()
else:
everything_was_cached = 0
break
if not everything_was_cached:
self.targets[0].build()
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception, e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError
def executed_without_callbacks(self):
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
def executed_with_callbacks(self):
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
t.push_to_cache()
t.built()
t.visited()
executed = executed_with_callbacks
def failed(self):
"""
Default action when a task fails: stop the build.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
self.fail_stop()
def fail_stop(self):
"""
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
        if T: T.write(self.trace_message('Task.fail_stop()', self.node))
# Invoke will_not_build() to clean-up the pending children
# list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1
def fail_continue(self):
"""
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
        if T: T.write(self.trace_message('Task.fail_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
def make_ready_all(self):
"""
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING)
def make_ready_current(self):
"""
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError, e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
make_ready = make_ready_current
def postprocess(self):
"""
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for t in targets:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess()
# Exception handling subsystem.
#
# Exceptions that occur while walking the DAG or examining Nodes
# must be raised, but must be raised at an appropriate time and in
# a controlled manner so we can, if necessary, recover gracefully,
# possibly write out signature information for Nodes we've updated,
# etc. This is done by having the Taskmaster tell us about the
    # exception, and letting the Task hold on to it until a controlled
    # point in execution, where exception_raise() re-raises it.
def exc_info(self):
"""
Returns info about a recorded exception.
"""
return self.exception
def exc_clear(self):
"""
Clears any recorded exception.
This also changes the "exception_raise" attribute to point
to the appropriate do-nothing method.
"""
self.exception = (None, None, None)
self.exception_raise = self._no_exception_to_raise
def exception_set(self, exception=None):
"""
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
        to the method that will, in fact, raise the recorded exception.
"""
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise
def _no_exception_to_raise(self):
pass
def _exception_raise(self):
"""
Raises a pending exception that was recorded while getting a
Task ready for execution.
"""
exc = self.exc_info()[:]
try:
exc_type, exc_value, exc_traceback = exc
except ValueError:
exc_type, exc_value = exc
exc_traceback = None
raise exc_type, exc_value, exc_traceback
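# A sketch of the record/re-raise flow described above (illustrative, not
# part of SCons): a caller records an exception raised while readying a
# Task instead of letting it propagate, and the exception then surfaces at
# a controlled point when prepare() calls exception_raise():
#
#     try:
#         task.make_ready()
#     except:
#         task.exception_set()    # records sys.exc_info()
#     ...
#     task.prepare()              # re-raises via exception_raise()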
class AlwaysTask(Task):
def needs_execute(self):
"""
Always returns True (indicating this Task should always
be executed).
        Subclasses that need this behavior (as opposed to the default
        of only executing Nodes that are out of date w.r.t. their
        dependencies) can get it by deriving from this class rather
        than from Task directly (Task itself has no execute_always
        attribute):
            class MyTaskSubclass(SCons.Taskmaster.AlwaysTask):
                ...
"""
return True
class OutOfDateTask(Task):
def needs_execute(self):
"""
Returns True (indicating this Task should be executed) if this
Task's target state indicates it needs executing, which has
already been determined by an earlier up-to-date check.
"""
return self.targets[0].get_state() == SCons.Node.executing
def find_cycle(stack, visited):
if stack[-1] in visited:
return None
visited.add(stack[-1])
for n in stack[-1].waiting_parents:
stack.append(n)
if stack[0] == stack[-1]:
return stack
if find_cycle(stack, visited):
return stack
stack.pop()
return None
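# A usage sketch for find_cycle() (illustrative; mirrors how cleanup() below
# calls it): seed the stack with a single pending node and an empty visited
# set; a non-None result is the cycle path itself:
#
#     cycle = find_cycle([node], set())
#     if cycle:
#         print " -> ".join(map(str, cycle))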
class Taskmaster(object):
"""
The Taskmaster for walking the dependency DAG.
"""
def __init__(self, targets=[], tasker=None, order=None, trace=None):
self.original_top = targets
self.top_targets_left = targets[:]
self.top_targets_left.reverse()
self.candidates = []
if tasker is None:
tasker = OutOfDateTask
self.tasker = tasker
if not order:
order = lambda l: l
self.order = order
self.message = None
self.trace = trace
self.next_candidate = self.find_next_candidate
self.pending_children = set()
def find_next_candidate(self):
"""
Returns the next candidate Node for (potential) evaluation.
The candidate list (really a stack) initially consists of all of
the top-level (command line) targets provided when the Taskmaster
was initialized. While we walk the DAG, visiting Nodes, all the
children that haven't finished processing get pushed on to the
candidate list. Each child can then be popped and examined in
turn for whether *their* children are all up-to-date, in which
case a Task will be created for their actual evaluation and
potential building.
Here is where we also allow candidate Nodes to alter the list of
Nodes that should be examined. This is used, for example, when
invoking SCons in a source directory. A source directory Node can
return its corresponding build directory Node, essentially saying,
"Hey, you really need to build this thing over here instead."
"""
try:
return self.candidates.pop()
except IndexError:
pass
try:
node = self.top_targets_left.pop()
except IndexError:
return None
self.current_top = node
alt, message = node.alter_targets()
if alt:
self.message = message
self.candidates.append(node)
self.candidates.extend(self.order(alt))
node = self.candidates.pop()
return node
def no_next_candidate(self):
"""
Stops Taskmaster processing by not returning a next candidate.
Note that we have to clean-up the Taskmaster candidate list
        because the cycle detection depends on the fact that all nodes have
been processed somehow.
"""
while self.candidates:
candidates = self.candidates
self.candidates = []
self.will_not_build(candidates)
return None
def _validate_pending_children(self):
"""
Validate the content of the pending_children set. Assert if an
internal error is found.
This function is used strictly for debugging the taskmaster by
checking that no invariants are violated. It is not used in
normal operation.
The pending_children set is used to detect cycles in the
dependency graph. We call a "pending child" a child that is
found in the "pending" state when checking the dependencies of
its parent node.
A pending child can occur when the Taskmaster completes a loop
        through a cycle. For example, let's imagine a graph made of
        three nodes (A, B and C) forming a cycle. The evaluation starts
        at node A. The taskmaster first considers whether node A's
child B is up-to-date. Then, recursively, node B needs to
check whether node C is up-to-date. This leaves us with a
dependency graph looking like:
Next candidate \
\
Node A (Pending) --> Node B(Pending) --> Node C (NoState)
^ |
| |
+-------------------------------------+
Now, when the Taskmaster examines the Node C's child Node A,
it finds that Node A is in the "pending" state. Therefore,
Node A is a pending child of node C.
        Pending children indicate that the Taskmaster has potentially
        looped back through a cycle. We say potentially because it could
also occur when a DAG is evaluated in parallel. For example,
consider the following graph:
Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ...
| ^
| |
+----------> Node D (NoState) --------+
/
Next candidate /
The Taskmaster first evaluates the nodes A, B, and C and
        starts building some children of node C. Assuming that the
maximum parallel level has not been reached, the Taskmaster
will examine Node D. It will find that Node C is a pending
child of Node D.
In summary, evaluating a graph with a cycle will always
involve a pending child at one point. A pending child might
indicate either a cycle or a diamond-shaped DAG. Only a
        fraction of the nodes end up being a "pending child" of
another node. This keeps the pending_children set small in
practice.
We can differentiate between the two cases if we wait until
the end of the build. At this point, all the pending children
nodes due to a diamond-shaped DAG will have been properly
built (or will have failed to build). But, the pending
children involved in a cycle will still be in the pending
state.
The taskmaster removes nodes from the pending_children set as
soon as a pending_children node moves out of the pending
state. This also helps to keep the pending_children set small.
"""
for n in self.pending_children:
assert n.state in (NODE_PENDING, NODE_EXECUTING), \
(str(n), StateString[n.state])
assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents))
for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count)
def trace_message(self, message):
return 'Taskmaster: %s\n' % message
def trace_node(self, node):
return '<%-10s %-3s %s>' % (StateString[node.get_state()],
node.ref_count,
repr(str(node)))
def _find_next_ready_node(self):
"""
Finds the next node that is ready to be built.
This is *the* main guts of the DAG walk. We loop through the
list of candidates, looking for something that has no un-built
children (i.e., that is a leaf Node or has dependencies that are
all leaf Nodes or up-to-date). Candidate Nodes are re-scanned
(both the target Node itself and its sources, which are always
scanned in the context of a given target) to discover implicit
dependencies. A Node that must wait for some children to be
built will be put back on the candidates list after the children
have finished building. A Node that has been put back on the
candidates list in this way may have itself (or its sources)
re-scanned, in order to handle generated header files (e.g.) and
the implicit dependencies therein.
Note that this method does not do any signature calculation or
up-to-date check itself. All of that is handled by the Task
class. This is purely concerned with the dependency graph walk.
"""
self.ready_exc = None
T = self.trace
if T: T.write(u'\n' + self.trace_message('Looking for a node to evaluate'))
while True:
node = self.next_candidate()
if node is None:
if T: T.write(self.trace_message('No candidate anymore.') + u'\n')
return None
node = node.disambiguate()
state = node.get_state()
# For debugging only:
#
# try:
# self._validate_pending_children()
# except:
# self.ready_exc = sys.exc_info()
# return node
if CollectStats:
if not hasattr(node, 'stats'):
node.stats = Stats()
StatsNodes.append(node)
S = node.stats
S.considered = S.considered + 1
else:
S = None
if T: T.write(self.trace_message(u' Considering node %s and its children:' % self.trace_node(node)))
if state == NODE_NO_STATE:
# Mark this node as being on the execution stack:
node.set_state(NODE_PENDING)
elif state > NODE_PENDING:
# Skip this node if it has already been evaluated:
if S: S.already_handled = S.already_handled + 1
if T: T.write(self.trace_message(u' already handled (executed)'))
continue
executor = node.get_executor()
try:
children = executor.get_all_children()
except SystemExit:
exc_value = sys.exc_info()[1]
e = SCons.Errors.ExplicitExit(node, exc_value.code)
self.ready_exc = (SCons.Errors.ExplicitExit, e)
if T: T.write(self.trace_message(' SystemExit'))
return node
except Exception, e:
# We had a problem just trying to figure out the
# children (like a child couldn't be linked in to a
# VariantDir, or a Scanner threw something). Arrange to
# raise the exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if S: S.problem = S.problem + 1
if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e))
return node
children_not_visited = []
children_pending = set()
children_not_ready = []
children_failed = False
for child in chain(executor.get_all_prerequisites(), children):
childstate = child.get_state()
if T: T.write(self.trace_message(u' ' + self.trace_node(child)))
if childstate == NODE_NO_STATE:
children_not_visited.append(child)
elif childstate == NODE_PENDING:
children_pending.add(child)
elif childstate == NODE_FAILED:
children_failed = True
if childstate <= NODE_EXECUTING:
children_not_ready.append(child)
# These nodes have not even been visited yet. Add
# them to the list so that on some next pass we can
# take a stab at evaluating them (or their children).
children_not_visited.reverse()
self.candidates.extend(self.order(children_not_visited))
#if T and children_not_visited:
# T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited)))
# T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates)))
# Skip this node if any of its children have failed.
#
# This catches the case where we're descending a top-level
# target and one of our children failed while trying to be
# built by a *previous* descent of an earlier top-level
# target.
#
# It can also occur if a node is reused in multiple
            # targets. One first descends through one of the targets;
            # the next descent occurs through the other target.
#
# Note that we can only have failed_children if the
# --keep-going flag was used, because without it the build
            # will stop before diving into the other branch.
#
# Note that even if one of the children fails, we still
# added the other children to the list of candidate nodes
# to keep on building (--keep-going).
if children_failed:
for n in executor.get_action_targets():
n.set_state(NODE_FAILED)
if S: S.child_failed = S.child_failed + 1
if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node)))
continue
if children_not_ready:
for child in children_not_ready:
# We're waiting on one or more derived targets
# that have not yet finished building.
if S: S.not_built = S.not_built + 1
# Add this node to the waiting parents lists of
# anything we're waiting on, with a reference
# count so we can be put back on the list for
# re-evaluation when they've all finished.
node.ref_count = node.ref_count + child.add_to_waiting_parents(node)
if T: T.write(self.trace_message(u' adjusted ref count: %s, child %s' %
(self.trace_node(node), repr(str(child)))))
if T:
for pc in children_pending:
T.write(self.trace_message(' adding %s to the pending children set\n' %
self.trace_node(pc)))
self.pending_children = self.pending_children | children_pending
continue
# Skip this node if it has side-effects that are
# currently being built:
wait_side_effects = False
for se in executor.get_action_side_effects():
if se.get_state() == NODE_EXECUTING:
se.add_to_waiting_s_e(node)
wait_side_effects = True
if wait_side_effects:
if S: S.side_effects = S.side_effects + 1
continue
# The default when we've gotten through all of the checks above:
# this node is ready to be built.
if S: S.build = S.build + 1
if T: T.write(self.trace_message(u'Evaluating %s\n' %
self.trace_node(node)))
# For debugging only:
#
# try:
# self._validate_pending_children()
# except:
# self.ready_exc = sys.exc_info()
# return node
return node
return None
def next_task(self):
"""
Returns the next task to be executed.
This simply asks for the next Node to be evaluated, and then wraps
it in the specific Task subclass with which we were initialized.
"""
node = self._find_next_ready_node()
if node is None:
return None
tlist = node.get_executor().get_all_targets()
task = self.tasker(self, tlist, node in self.original_top, node)
try:
task.make_ready()
except:
# We had a problem just trying to get this task ready (like
# a child couldn't be linked in to a VariantDir when deciding
# whether this node is current). Arrange to raise the
# exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if self.ready_exc:
task.exception_set(self.ready_exc)
self.ready_exc = None
return task
def will_not_build(self, nodes, node_func=lambda n: None):
"""
        Perform clean-up for nodes that will never be built. Invokes
a user defined function on all of these nodes (including all
of their parents).
"""
T = self.trace
pending_children = self.pending_children
to_visit = set(nodes)
pending_children = pending_children - to_visit
if T:
for n in nodes:
T.write(self.trace_message(' removing node %s from the pending children set\n' %
self.trace_node(n)))
try:
while len(to_visit):
node = to_visit.pop()
node_func(node)
# Prune recursion by flushing the waiting children
# list immediately.
parents = node.waiting_parents
node.waiting_parents = set()
to_visit = to_visit | parents
pending_children = pending_children - parents
for p in parents:
p.ref_count = p.ref_count - 1
if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' %
self.trace_node(p)))
except KeyError:
# The container to_visit has been emptied.
pass
        # We have to stick the pending_children set back into the
        # taskmaster because Python 1.5.2 compatibility does not
        # allow us to use in-place updates.
self.pending_children = pending_children
def stop(self):
"""
Stops the current build completely.
"""
self.next_candidate = self.no_next_candidate
def cleanup(self):
"""
Check for dependency cycles.
"""
if not self.pending_children:
return
nclist = [(n, find_cycle([n], set())) for n in self.pending_children]
genuine_cycles = [
node for node,cycle in nclist
if cycle or node.get_state() != NODE_EXECUTED
]
if not genuine_cycles:
# All of the "cycles" found were single nodes in EXECUTED state,
# which is to say, they really weren't cycles. Just return.
return
desc = 'Found dependency cycle(s):\n'
for node, cycle in nclist:
if cycle:
desc = desc + " " + " -> ".join(map(str, cycle)) + "\n"
else:
desc = desc + \
" Internal Error: no cycle found for node %s (%s) in state %s\n" % \
(node, repr(node), StateString[node.get_state()])
raise SCons.Errors.UserError(desc)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
campbe13/openhatch
|
refs/heads/master
|
vendor/packages/gdata/tests/gdata_tests/sites/live_client_test.py
|
41
|
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'e.bidelman (Eric Bidelman)'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.sites.client
import gdata.sites.data
import gdata.test_config as conf
conf.options.register_option(conf.TEST_IMAGE_LOCATION_OPTION)
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
conf.options.register_option(conf.SITES_NAME_OPTION)
class SitesClientTest(unittest.TestCase):
def setUp(self):
self.client = None
if conf.options.get_value('runlive') == 'true':
self.client = gdata.sites.client.SitesClient(
site=conf.options.get_value('sitename'),
domain=conf.options.get_value('appsdomain'))
if conf.options.get_value('ssl') == 'true':
self.client.ssl = True
conf.configure_client(self.client, 'SitesTest', self.client.auth_service,
True)
def tearDown(self):
conf.close_client(self.client)
def testCreateUpdateDelete(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testCreateUpdateDelete')
new_entry = self.client.CreatePage(
'webpage', 'Title Of Page', '<b>Your html content</b>')
self.assertEqual(new_entry.title.text, 'Title Of Page')
self.assertEqual(new_entry.page_name.text, 'title-of-page')
self.assert_(new_entry.GetAlternateLink().href is not None)
self.assertEqual(new_entry.Kind(), 'webpage')
# Change the title of the webpage we just added.
new_entry.title.text = 'Edited'
updated_entry = self.client.update(new_entry)
self.assertEqual(updated_entry.title.text, 'Edited')
self.assertEqual(updated_entry.page_name.text, 'title-of-page')
self.assert_(isinstance(updated_entry, gdata.sites.data.ContentEntry))
# Delete the test webpage from the Site.
self.client.delete(updated_entry)
def testCreateAndUploadToFilecabinet(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testCreateAndUploadToFilecabinet')
filecabinet = self.client.CreatePage(
'filecabinet', 'FilesGoHere', '<b>Your html content</b>',
page_name='diff-pagename-than-title')
self.assertEqual(filecabinet.title.text, 'FilesGoHere')
self.assertEqual(filecabinet.page_name.text, 'diff-pagename-than-title')
self.assert_(filecabinet.GetAlternateLink().href is not None)
self.assertEqual(filecabinet.Kind(), 'filecabinet')
# Upload a file to the filecabinet
filepath = conf.options.get_value('imgpath')
attachment = self.client.UploadAttachment(
filepath, filecabinet, content_type='image/jpeg', title='TestImageFile',
description='description here')
self.assertEqual(attachment.title.text, 'TestImageFile')
self.assertEqual(attachment.FindParentLink(),
filecabinet.GetSelfLink().href)
# Delete the test filecabinet and attachment from the Site.
self.client.delete(attachment)
self.client.delete(filecabinet)
def suite():
return conf.build_suite([SitesClientTest])
if __name__ == '__main__':
unittest.TextTestRunner().run(suite())
|
ram8647/gcb-mobilecsp
|
refs/heads/master
|
appengine_config.py
|
1
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom configurations and functions for Google App Engine."""
__author__ = '[email protected] (Pavel Simakov)'
import datetime
import importlib
import logging
import os
import sys
# configure Appstats
appstats_MAX_STACK = 20
# Whether we are running in the production environment.
PRODUCTION_MODE = not os.environ.get(
'SERVER_SOFTWARE', 'Development').startswith('Development')
# Set this flag to true to enable bulk downloads of Javascript/CSS files in lib
BUNDLE_LIB_FILES = not os.environ.get(
'GCB_STATIC_SERV_ENABLED', 'false').upper() == 'TRUE'
# this is the official location of this app for computing of all relative paths
BUNDLE_ROOT = os.path.dirname(__file__)
# make all Windows and Linux paths have the same separator '/'
BUNDLE_ROOT = BUNDLE_ROOT.replace('\\', '/')
CODE_ROOT = BUNDLE_ROOT
# Default namespace name is '' and not None.
DEFAULT_NAMESPACE_NAME = ''
# Flag to indicate whether module importation is in progress. Some modules
# and core items may wish to be a little flexible about warnings and
# exceptions due to some, but not all, modules being imported yet at module
# registration time.
MODULE_REGISTRATION_IN_PROGRESS = False
# Name for the core module. We don't actually have any code in modules/core,
# since having a core module is pretty well a contradiction in terms. However,
# there are a few things that want module and module-like-things to register
# themselves by name, and so here we provide a name for the un-module that is
# the immutable core functionality.
CORE_MODULE_NAME = 'core'
class _Library(object):
"""DDO that represents a Python library contained in a .zip file."""
def __init__(self, zipfile, relative_path=None):
self._relative_path = relative_path
self._zipfile = zipfile
@property
def file_path(self):
"""Path to the library's file on disk."""
return os.path.join(BUNDLE_ROOT, 'lib', self._zipfile)
@property
def full_path(self):
"""Full path for imports, containing archive-relative paths if any."""
path = self.file_path
if self._relative_path:
path = os.path.join(path, self._relative_path)
return path
# Google-produced library zip files.
GOOGLE_LIBS = [
_Library('google-api-python-client-1.4.0.zip'),
_Library('GoogleAppEngineCloudStorageClient-1.9.15.0.zip',
relative_path='GoogleAppEngineCloudStorageClient-1.9.15.0'),
_Library('GoogleAppEnginePipeline-1.9.17.0.zip',
relative_path='GoogleAppEnginePipeline-1.9.17.0'),
]
# Third-party library zip files.
THIRD_PARTY_LIBS = [
_Library('Graphy-1.0.0.zip', relative_path='Graphy-1.0.0'),
_Library('appengine-mapreduce-0.8.2.zip',
relative_path='appengine-mapreduce-0.8.2/python/src'),
_Library('babel-0.9.6.zip'),
_Library('decorator-3.4.0.zip', relative_path='src'),
_Library('gaepytz-2011h.zip'),
_Library('html5lib-0.95.zip'),
_Library('identity-toolkit-python-client-0.1.6.zip'),
_Library('markdown-2.5.zip', relative_path='Markdown-2.5'),
_Library('mrs-mapreduce-0.9.zip', relative_path='mrs-mapreduce-0.9'),
_Library('networkx-1.9.1.zip', relative_path='networkx-1.9.1'),
_Library('oauth-1.0.1.zip', relative_path='oauth'),
_Library('pyparsing-1.5.7.zip'),
_Library('reportlab-3.1.8.zip'),
_Library('simplejson-3.7.1.zip', relative_path='simplejson-3.7.1'),
# rdflib and deps
_Library('isodate-0.5.5.zip', relative_path='src'),
_Library('rdflib-4.2.2-dev.zip', relative_path='rdflib'),
]
ALL_LIBS = GOOGLE_LIBS + THIRD_PARTY_LIBS
def gcb_force_default_encoding(encoding):
"""Force default encoding to a specific value."""
# Eclipse silently sets default encoding to 'utf-8', while GAE forces
# 'ascii'. We need to control this directly for consistency.
if sys.getdefaultencoding() != encoding:
reload(sys)
sys.setdefaultencoding(encoding)
def _third_party_libs_from_env():
ret = []
for lib_config in os.environ.get('GCB_THIRD_PARTY_LIBRARIES', '').split():
parts = lib_config.split(':')
if len(parts) == 1:
ret.append(_Library(parts[0]))
else:
ret.append(_Library(parts[0], relative_path=parts[1]))
return ret
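# Illustrative value for the environment variable parsed above: entries are
# whitespace-separated, each either 'zipfile' or 'zipfile:relative_path'
# (the file names here are hypothetical):
#
#     GCB_THIRD_PARTY_LIBRARIES='foo-1.0.zip bar-2.0.zip:bar-2.0/src'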
def gcb_init_third_party():
"""Add all third party libraries to system path."""
for lib in ALL_LIBS + _third_party_libs_from_env():
if not os.path.exists(lib.file_path):
raise Exception('Library does not exist: %s' % lib.file_path)
sys.path.insert(0, lib.full_path)
def gcb_appstats_enabled():
return 'True' == os.environ.get('GCB_APPSTATS_ENABLED')
def webapp_add_wsgi_middleware(app):
"""Enable AppStats if requested."""
if gcb_appstats_enabled():
logging.info('Enabling AppStats.')
from google.appengine.ext.appstats import recording
app = recording.appstats_wsgi_middleware(app)
return app
def _import_and_enable_modules(env_var, reraise=False):
for module_name in os.environ.get(env_var, '').split():
option = 'enabled'
if module_name.count('='):
module_name, option = module_name.split('=', 1)
try:
operation = 'importing'
module = importlib.import_module(module_name)
operation = 'registering'
custom_module = module.register_module()
            if option == 'enabled':  # '==' rather than 'is': string identity is not guaranteed
operation = 'enabling'
custom_module.enable()
except Exception, ex: # pylint: disable=broad-except
logging.exception('Problem %s module "%s"', operation, module_name)
if reraise:
raise ex
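# Illustrative value for the module-list variables consumed above: entries
# are whitespace-separated dotted module names, optionally suffixed with
# '=option'; any option other than 'enabled' imports and registers the
# module without enabling it (the module names here are hypothetical):
#
#     GCB_REGISTERED_MODULES='modules.foo.foo modules.bar.bar=disabled'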
def import_and_enable_modules():
global MODULE_REGISTRATION_IN_PROGRESS # pylint: disable=global-statement
MODULE_REGISTRATION_IN_PROGRESS = True
_import_and_enable_modules('GCB_REGISTERED_MODULES')
_import_and_enable_modules('GCB_REGISTERED_MODULES_CUSTOM')
_import_and_enable_modules('GCB_THIRD_PARTY_MODULES')
MODULE_REGISTRATION_IN_PROGRESS = False
def time_delta_to_millis(delta):
"""Converts time delta into total number of milliseconds."""
millis = delta.days * 24 * 60 * 60 * 1000
millis += delta.seconds * 1000
millis += delta.microseconds / 1000
return millis
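# Worked example for the conversion above: a delta of 1 day, 2 seconds and
# 3000 microseconds yields 24*60*60*1000 + 2*1000 + 3 = 86402003 millis.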
def timeandlog(name, duration_only=False):
"""Times and logs execution of decorated method."""
def timed_1(func):
def timed_2(*args, **kwargs):
_name = name
if args and isinstance(args[0], type):
_name += '.' + str(args[0].__name__)
before = datetime.datetime.utcnow()
if not duration_only:
log_appstats_event(_name + '.enter')
result = func(*args, **kwargs)
after = datetime.datetime.utcnow()
millis = time_delta_to_millis(after - before)
if duration_only:
logging.info(_name + ': duration=%sms' % millis)
log_appstats_event(_name, {'millis': millis})
else:
logging.info(_name + '.leave: duration=%sms' % millis)
log_appstats_event(_name + '.leave', {'millis': millis})
return result
if gcb_appstats_enabled():
return timed_2
else:
return func
return timed_1
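# Illustrative usage of the decorator above (the function and label are
# hypothetical): entry/leave events and the duration are logged only when
# Appstats is enabled; otherwise the function is returned undecorated:
#
#     @timeandlog('course.load', duration_only=True)
#     def load_course():
#         ...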
def log_appstats_event(label, data=None):
if gcb_appstats_enabled():
try:
from google.appengine.ext.appstats.recording import recorder_proxy
if recorder_proxy and (
recorder_proxy.has_recorder_for_current_request()):
recorder_proxy.record_custom_event(label=label, data=data)
except Exception: # pylint: disable=broad-except
logging.exception('Failed to record Appstats event %s.', label)
gcb_init_third_party()
|
tushevorg/namebench
|
refs/heads/master
|
libnamebench/conn_quality.py
|
173
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to determine connection quality."""
__author__ = '[email protected] (Thomas Stromberg)'
import time
import nameserver
import providers
import sys_nameservers
import util
EXPECTED_CONGESTION_DURATION = 40.0
CONGESTION_OFFSET_MULTIPLIER = 1
MAX_CONGESTION_MULTIPLIER = 6
class OfflineConnection(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return str(self.value)
class ConnectionQuality(object):
"""Methods related to connection quality detection."""
def __init__(self, status_callback=None):
self.status_callback = status_callback
self.primary = providers.SystemResolver()
def msg(self, msg, **kwargs):
if self.status_callback:
self.status_callback(msg, **kwargs)
else:
print '- %s' % msg
def GetNegativeResponseDuration(self):
"""Use the built-in DNS server to query for a negative response."""
if self.primary:
self.primary.health_timeout = 20
return self.primary.TestNegativeResponse()
def GetGoogleResponseDuration(self):
"""See how quickly we can query for www.google.com using a remote nameserver."""
gdns = providers.GooglePublicDNS()
gdns.health_timeout = 20
return gdns.TimedRequest('A', 'www.google.com.')
def CheckConnectionQuality(self):
"""Look how healthy our DNS connection quality. Averages check durations."""
is_connection_offline = True
self.msg('Checking query interception status...')
odns = providers.OpenDNS()
(intercepted, i_duration) = odns.InterceptionStateWithDuration()
if i_duration:
is_connection_offline = False
durations = []
try_count = 3
for i in range(try_count):
self.msg('Checking connection quality', count=i+1, total=try_count)
if self.primary:
(broken, unused_warning, n_duration) = self.GetNegativeResponseDuration()
if not broken:
is_connection_offline = False
durations.append(n_duration)
(unused_response, g_duration, error_msg) = self.GetGoogleResponseDuration()
if not error_msg:
durations.append(g_duration)
is_connection_offline = False
if is_connection_offline and (i+1) != try_count:
self.msg('The internet connection appears to be offline (%s of %s)' % (i+1, try_count))
time.sleep(0.2)
if is_connection_offline:
      raise OfflineConnection('It would appear that your internet connection is offline. '
                              'namebench is not getting a response for DNS queries to '
'%s, %s, or %s.' % (self.primary.ip, providers.GOOGLE_IP,
providers.OPENDNS_IP))
avg_latency_s = util.CalculateListAverage(durations) / 1000.0
max_latency_s = max(durations) / 1000.0
self.msg("Average DNS lookup latency: %.2fs Maximum: %.2fs" % (avg_latency_s, max_latency_s))
return (intercepted, avg_latency_s, max_latency_s)
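# Illustrative wiring (not part of this module): run the checks, unpack the
# interception flag plus the latency figures, and handle the offline case:
#
#     cq = ConnectionQuality()
#     try:
#         (intercepted, avg_s, max_s) = cq.CheckConnectionQuality()
#     except OfflineConnection, e:
#         print '- %s' % e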
|
chkir/django-cms
|
refs/heads/develop
|
cms/south_migrations/0019_public_table_renames.py
|
1680
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
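# For the stock django.contrib.auth User model, the labels above evaluate to
# user_orm_label == 'auth.User', user_model_label == 'auth.user' and
# user_ptr_name == 'user_ptr'.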
class Migration(SchemaMigration):
def forwards(self, orm):
# Dummy migration
pass
def backwards(self, orm):
# Dummy migration
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'})
},
'auth.permission': {
'Meta': {
'ordering': "('content_type__app_label', 'content_type__model', 'codename')",
'unique_together': "(('content_type', 'codename'),)",
'object_name': 'Permission'},
'codename': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['contenttypes.ContentType']"}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [],
{'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Group']", 'symmetrical': 'False',
'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'password': (
'django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': (
'django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.CMSPlugin']", 'null': 'True',
'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': "orm['sites.Site']",
'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')",
'object_name': 'Page'},
'changed_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'created_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'limit_visibility_in_menu': (
'django.db.models.fields.SmallIntegerField', [],
{'default': 'None', 'null': 'True', 'db_index': 'True',
'blank': 'True'}),
'login_required': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '80',
'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True', 'related_name': "'children'",
'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['cms.Placeholder']",
'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [],
{'db_index': 'True', 'null': 'True',
'blank': 'True'}),
'published': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'publisher_public': (
'django.db.models.fields.related.OneToOneField', [],
{'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True',
'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [],
{'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '40', 'null': 'True',
'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'template': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.pagemoderator': {
'Meta': {'object_name': 'PageModerator'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderate_children': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_descendants': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_page': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')",
'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [],
{'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [],
{'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': (
'django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_users'",
'to': "orm['%s']" % user_orm_label}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['%s']" % user_orm_label, 'unique': 'True',
'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'created_usergroups'",
'to': "orm['%s']" % user_orm_label}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [],
{'to': "orm['auth.Group']", 'unique': 'True',
'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': (
'django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)",
'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '200',
'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'meta_keywords': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True',
'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [],
{'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [],
{'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': (
'django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': (
'django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"},
'app_label': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site',
'db_table': "'django_site'"},
'domain': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
|
JensGrabner/mpmath
|
refs/heads/master
|
doc/source/plots/coulombg_c.py
|
6
|
# Irregular Coulomb wave function in the complex plane
from mpmath import coulombg, cplot
cplot(lambda z: coulombg(1,1,z), points=50000)
|
frankiecjunle/yunblog
|
refs/heads/master
|
venv/lib/python2.7/site-packages/flask_migrate/templates/flask/env.py
|
557
|
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option('sqlalchemy.url',
current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(url=url)
with context.begin_transaction():
context.run_migrations()
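# For reference: offline mode is normally selected from the command line with
# the --sql flag (e.g. ``alembic upgrade head --sql`` or, via Flask-Migrate,
# ``flask db upgrade --sql``), which makes context.is_offline_mode() true.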
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.readthedocs.org/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
engine = engine_from_config(config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
connection = engine.connect()
context.configure(connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
|
JeffHeard/terrapyn_docker
|
refs/heads/master
|
pysqlite-2.6.3/doc/includes/sqlite3/shared_cache.py
|
49
|
from pysqlite2 import dbapi2 as sqlite3
# The shared cache is only available in SQLite versions 3.3.3 or later
# See the SQLite documentation for details.
sqlite3.enable_shared_cache(True)
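# A minimal sketch of the effect (assumes 'example.db' can be created): with
# shared cache enabled, connections opened within this process share SQLite's
# page cache rather than each holding a private copy.
con1 = sqlite3.connect("example.db")
con2 = sqlite3.connect("example.db")
con1.close()
con2.close()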
|
Karosuo/Linux_tools
|
refs/heads/master
|
xls_handlers/xls_sum_venv/lib/python3.6/site-packages/pip/_vendor/msgpack/_version.py
|
41
|
version = (0, 5, 6)
|
SM-G920P/kernel_samsung_exynos7420
|
refs/heads/cm-13.0
|
tools/perf/scripts/python/sctop.py
|
11180
|
# system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
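#
# Example invocations matching the argument handling below:
#
#   perf script -s sctop.py            # all comms, default 3s refresh
#   perf script -s sctop.py 5          # all comms, 5s refresh
#   perf script -s sctop.py bash 5     # only syscalls made by 'bash', 5s refresh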
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
jabadabadu/cassango
|
refs/heads/master
|
cassango/creation.py
|
1
|
#-*- coding: iso-8859-2 -*-
import sys
import time
from django.conf import settings
from django.core.management import call_command
from django.db.utils import load_backend
from pycassa.types import *
TEST_DATABASE_PREFIX = 'test_'
class CassandraDatabaseCreation(object):
data_types = {
'AutoField' : 'text',
'BooleanField' : 'boolean',
'CharField' : 'text',
'CommaSeparatedIntegerField' : 'text',
'DateField' : 'date',
'DateTimeField' : 'datetime',
'DecimalField' : 'decimal',
'EmailField' : 'text',
'FileField' : 'text',
'FilePathField' : 'text',
'FloatField' : 'float',
'IntegerField' : 'integer',
'BigIntegerField' : 'integer',
'IPAddressField' : 'text',
'GenericIPAddressField' : 'text',
'NullBooleanField' : 'boolean',
'OneToOneField' : 'integer',
'PositiveIntegerField' : 'integer',
'PositiveSmallIntegerField' : 'integer',
'SlugField' : 'text',
'SmallIntegerField' : 'integer',
'TextField' : 'text',
'TimeField' : 'time',
}
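    # Note: data_types is the Django-level column-type mapping consulted by
    # Field.db_type(); sql_create_model below maps fields to pycassa
    # validator classes separately when creating the column family.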
    def __init__(self, connection, manager):
        self.connection = connection
        self.manager = manager
def sql_create_model(self, model, style, known_models=set()):
keyspace_name = self.connection.settings_dict['NAME']
opts = model._meta
if not opts.managed or opts.proxy or opts.swapped:
return [], {}
column_validators = {}
        for f in opts.local_fields:
            col_name = str(f.column)
            # The comparisons below need the field's internal type name
            # (e.g. 'CharField'), not the backend column type string that
            # f.db_type() returns from data_types.
            col_type = f.get_internal_type()
            if col_type in ['AutoField', 'CharField', 'CommaSeparatedIntegerField', 'EmailField',
                            'FileField', 'FilePathField', 'IPAddressField',
                            'GenericIPAddressField', 'SlugField', 'TextField']:
                validator = UTF8Type
            elif col_type in ['IntegerField', 'OneToOneField', 'PositiveIntegerField',
                              'PositiveSmallIntegerField', 'SmallIntegerField']:
                validator = IntegerType
            elif col_type in ['BooleanField', 'NullBooleanField']:
                validator = AsciiType
            elif col_type == 'DecimalField':
                validator = DecimalType
            elif col_type in ['DateField', 'DateTimeField']:
                validator = DateType
            elif col_type == 'FloatField':
                validator = FloatType
            elif col_type == 'BigIntegerField':
                validator = LongType
            else:
                # Fall back to raw bytes for field types without a mapping.
                validator = BytesType
            column_validators[col_name] = validator
column_family_name = opts.db_table
        comparator_type = (self.connection.settings_dict.get('comparator_type')
                           or 'UTF8Type')
self.manager.create_column_family(keyspace=keyspace_name,
name=column_family_name,
comparator_type=comparator_type,
column_validation_classes=column_validators)
return [], {}
def create_test_db(self, verbosity=1, autoclobber=False):
test_database_name = self.get_test_db_name()
self.connection.reconnect()
self.drop_db(test_database_name)
call_command('syncdb',
verbosity=max(verbosity-1, 0),
interactive=False,
database=self.connection.alias
)
return test_database_name
    def destroy_test_db(self, old_database_name, verbosity=1):
        if verbosity >= 1:
            print "Destroying test database for alias '%s'..." % self.connection.alias
        test_database_name = self.connection.settings_dict['NAME']
        self.drop_db(test_database_name)
        self.connection.settings_dict['NAME'] = old_database_name
    def drop_db(self, database_name, verbosity=1):
        self.manager.drop_keyspace(database_name)
    def delete_test_cassandra_keyspace(self, keyspace_name, old_database_name):
        # Drop the given test keyspace and restore the original name.
        self.drop_db(keyspace_name)
        self.connection.settings_dict['NAME'] = old_database_name
def get_test_db_name(self):
settings_dict = self.connection.settings_dict
        if 'TEST_NAME' in settings_dict:
test_keyspace_name = settings_dict['TEST_NAME']
else:
test_keyspace_name = TEST_DATABASE_PREFIX + settings_dict['NAME']
return test_keyspace_name
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME']
)
|
benoitsteiner/tensorflow-xsmm
|
refs/heads/master
|
tensorflow/contrib/autograph/converters/builtin_functions.py
|
11
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Handles builtins and other special functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.autograph.core import converter
from tensorflow.contrib.autograph.pyct import templates
class BuiltinFunctionTransformer(converter.Base):
"""Handles builtin functions.
This transformer only covers functions that are translated into a
TF equivalent, like `len`.
"""
def _convert_builtin(self, node):
template = """
ag__.utils.dynamic_builtin(func, args)
"""
return templates.replace(template, func=node.func, args=node.args)[0].value
def _convert_print(self, node):
template = """
ag__.utils.dynamic_print(args)
"""
return templates.replace(template, args=node.args)[0].value
def visit_Call(self, node):
self.generic_visit(node)
# TODO(mdan): This won't work if the function was hidden.
# TODO(mdan): Rely on the live_val and use inspect_utils.is_builtin instead.
if (isinstance(node.func, gast.Name) and
node.func.id in ('len', 'range', 'xrange', 'float', 'int')):
return self._convert_builtin(node)
    # Print needs to be handled separately because it can be read as a statement.
if isinstance(node.func, gast.Name) and node.func.id == 'print':
return self._convert_print(node)
return node
def visit_Print(self, node):
self.generic_visit(node)
args = node.values
    # This handles the case of calling print(a, b), which parses as a single tuple argument.
if len(args) == 1 and isinstance(args[0], gast.Tuple):
args = args[0].elts
template = """
fname(args)
"""
function_call = templates.replace(template, fname='print', args=args)[0]
return self.visit(function_call)
def transform(node, ctx):
return BuiltinFunctionTransformer(ctx).visit(node)
|